/*
 *  Licensed to the Apache Software Foundation (ASF) under one
 *  or more contributor license agreements.  See the NOTICE file
 *  distributed with this work for additional information
 *  regarding copyright ownership.  The ASF licenses this file
 *  to you under the Apache License, Version 2.0 (the
 *  "License"); you may not use this file except in compliance
 *  with the License.  You may obtain a copy of the License at
 *
 *    https://www.apache.org/licenses/LICENSE-2.0
 *
 *  Unless required by applicable law or agreed to in writing,
 *  software distributed under the License is distributed on an
 *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 *  KIND, either express or implied.  See the License for the
 *  specific language governing permissions and limitations
 *  under the License.
 *
 */
package org.apache.directory.api.ldap.schema.extractor.impl;


import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
import java.io.InvalidObjectException;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.io.Writer;
import java.net.URL;
import java.nio.charset.Charset;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.ArrayDeque;
import java.util.Deque;
import java.util.Enumeration;
import java.util.Map;
import java.util.Map.Entry;
import java.util.UUID;
import java.util.regex.Pattern;

import org.apache.directory.api.i18n.I18n;
import org.apache.directory.api.ldap.model.constants.SchemaConstants;
import org.apache.directory.api.ldap.model.exception.LdapException;
import org.apache.directory.api.ldap.model.ldif.LdifEntry;
import org.apache.directory.api.ldap.model.ldif.LdifReader;
import org.apache.directory.api.ldap.schema.extractor.SchemaLdifExtractor;
import org.apache.directory.api.ldap.schema.extractor.UniqueResourceException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Extracts LDIF files for the schema repository into a destination directory.
 *
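 * <p>
 * A minimal usage sketch (the output directory name below is only an illustration):
 * </p>
 * <pre>
 * SchemaLdifExtractor extractor = new DefaultSchemaLdifExtractor( new File( "schema-root" ) );
 *
 * if ( !extractor.isExtracted() )
 * {
 *     extractor.extractOrCopy();
 * }
 * </pre>
 *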
 * @author <a href="mailto:dev@directory.apache.org">Apache Directory Project</a>
 */
public class DefaultSchemaLdifExtractor implements SchemaLdifExtractor
{
    /** The base path. */
    private static final String BASE_PATH = "";

    /** The schema sub-directory. */
    private static final String SCHEMA_SUBDIR = "schema";

    /** The logger. */
    private static final Logger LOG = LoggerFactory.getLogger( DefaultSchemaLdifExtractor.class );

    /**
     * The pattern to extract the schema from LDIF files.
     * java.util.regex.Pattern is immutable so only one instance is needed for all uses.
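     * It is meant to match resource paths that contain {@code schema/ou=schema} (with either
     * '/' or '\' as the separator) and end in {@code .ldif}.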
     */
    private static final Pattern EXTRACT_PATTERN = Pattern.compile( ".*schema" + "[/\\Q\\\\E]" + "ou=schema.*\\.ldif" );

    /** The extracted flag. */
    private boolean extracted;

    /** The output directory. */
    private File outputDirectory;


    /**
     * Creates an extractor which deposits files into the specified output
     * directory.
     *
     * @param outputDirectory the directory where the schema root is extracted
     */
    public DefaultSchemaLdifExtractor( File outputDirectory )
    {
        if ( LOG.isDebugEnabled() )
        {
            LOG.debug( I18n.msg( I18n.MSG_16000_BASE_PATH, BASE_PATH, outputDirectory ) );
        }

        this.outputDirectory = outputDirectory;
        File schemaDirectory = new File( outputDirectory, SCHEMA_SUBDIR );

        if ( !outputDirectory.exists() )
        {
            if ( LOG.isDebugEnabled() )
            {
                LOG.debug( I18n.msg( I18n.MSG_16001_CREATING_DIR, outputDirectory ) );
            }

            if ( !outputDirectory.mkdir() )
            {
                LOG.error( I18n.err( I18n.ERR_16042_OUTPUT_DIR_CREATION_FAIL, outputDirectory ) );
            }
        }
        else
        {
            if ( LOG.isDebugEnabled() )
            {
                LOG.debug( I18n.msg( I18n.MSG_16002_DIR_EXISTS ) );
            }
        }

        if ( !schemaDirectory.exists() )
        {
            if ( LOG.isInfoEnabled() )
            {
                LOG.info( I18n.msg( I18n.MSG_16004_SCHEMA_DIR_ABSENT, schemaDirectory ) );
            }

            extracted = false;
        }
        else
        {
            if ( LOG.isInfoEnabled() )
            {
                LOG.info( I18n.msg( I18n.MSG_16005_SCHEMA_DIR_PRESENT, schemaDirectory ) );
            }

            extracted = true;
        }
    }

    /**
     * Tells whether or not the schema folder has already been extracted.
     *
     * @return true if the schema folder has already been extracted
     */
    @Override
    public boolean isExtracted()
    {
        return extracted;
    }

    /**
     * Extracts the LDIF files from a Jar file or copies exploded LDIF resources.
     *
     * @param overwrite if true, overwrite an already extracted structure; if false, fail when the schema has already been extracted
     * @throws IOException if the schema has already been extracted and overwrite is false, or on IO errors
     */
    @Override
    public void extractOrCopy( boolean overwrite ) throws IOException
    {
        if ( !outputDirectory.exists() && !outputDirectory.mkdirs() )
        {
            throw new IOException( I18n.err( I18n.ERR_16006_DIRECTORY_CREATION_FAILED, outputDirectory
                .getAbsolutePath() ) );
        }

        File schemaDirectory = new File( outputDirectory, SCHEMA_SUBDIR );

        if ( !schemaDirectory.exists() )
        {
            if ( !schemaDirectory.mkdirs() )
            {
                throw new IOException( I18n.err( I18n.ERR_16006_DIRECTORY_CREATION_FAILED, schemaDirectory
                    .getAbsolutePath() ) );
            }
        }
        else if ( !overwrite )
        {
            throw new IOException( I18n.err( I18n.ERR_16000_CANNOT_OVEWRITE_SCHEMA, schemaDirectory.getAbsolutePath() ) );
        }

        Map<String, Boolean> list = ResourceMap.getResources( EXTRACT_PATTERN );

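        // Each entry maps a resource path to a flag: true when the resource is packaged
        // in a jar and must be extracted through the class loader, false when it is an
        // exploded file on disk that can simply be copied.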
        for ( Entry<String, Boolean> entry : list.entrySet() )
        {
            if ( entry.getValue() )
            {
                extractFromClassLoader( entry.getKey() );
            }
            else
            {
                File resource = new File( entry.getKey() );
                copyFile( resource, getDestinationFile( resource ) );
            }
        }
    }

    /**
     * Extracts the LDIF files from a Jar file or copies exploded LDIF
     * resources without overwriting the resources if the schema has
     * already been extracted.
     *
     * @throws IOException if the schema has already been extracted, or on IO errors
     */
    @Override
    public void extractOrCopy() throws IOException
    {
        extractOrCopy( false );
    }

    /**
     * Copies a schema LDIF file from the source to the destination, prepending a
     * "version: 1" line and generating an entryUUID attribute if the entry does
     * not already carry one.
     *
     * @param source the source file to copy
     * @param destination the destination to copy the source to
     * @throws IOException if there are IO errors or the source does not exist
     */
    private void copyFile( File source, File destination ) throws IOException
    {
        if ( LOG.isDebugEnabled() )
        {
            LOG.debug( I18n.msg( I18n.MSG_16003_COPYFILE, source, destination ) );
        }

        if ( !destination.getParentFile().exists() && !destination.getParentFile().mkdirs() )
        {
            throw new IOException( I18n.err( I18n.ERR_16006_DIRECTORY_CREATION_FAILED, destination.getParentFile()
                .getAbsolutePath() ) );
        }

        if ( !source.getParentFile().exists() )
        {
            throw new FileNotFoundException( I18n.err( I18n.ERR_16001_CANNOT_COPY_NON_EXISTENT, source.getAbsolutePath() ) );
        }

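        // The LDIF entry is parsed and re-serialized rather than copied byte for byte,
        // so that the version header and a generated entryUUID can be added below.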
        try ( Writer out = new OutputStreamWriter( Files.newOutputStream( Paths.get( destination.getPath() ) ),
            Charset.defaultCharset() );
            LdifReader ldifReader = new LdifReader( source ) )
        {
            boolean first = true;
            LdifEntry ldifEntry = null;

            while ( ldifReader.hasNext() )
            {
                if ( first )
                {
                    ldifEntry = ldifReader.next();

                    if ( ldifEntry.get( SchemaConstants.ENTRY_UUID_AT ) == null )
                    {
                        // No UUID, let's create one
                        UUID entryUuid = UUID.randomUUID();
                        ldifEntry.addAttribute( SchemaConstants.ENTRY_UUID_AT, entryUuid.toString() );
                    }

                    first = false;
                }
                else
                {
                    // throw an exception : we should not have more than one entry per schema ldif file
                    String msg = I18n.err( I18n.ERR_16002_MORE_THAN_ONE_ENTRY, source );
                    LOG.error( msg );
                    throw new InvalidObjectException( msg );
                }
            }

            // Add the version at the first line, to avoid a warning
            String ldifString;

            if ( ldifEntry != null )
            {
                ldifString = "version: 1\n" + ldifEntry.toString();
            }
            else
            {
                ldifString = "version: 1\n";
            }

            out.write( ldifString );
            out.flush();
        }
        catch ( LdapException le )
        {
            String msg = I18n.err( I18n.ERR_16003_ERROR_PARSING_LDIF, source, le.getLocalizedMessage() );
            LOG.error( msg );
            throw new InvalidObjectException( msg );
        }
    }


    /**
     * Assembles the destination file by appending file components previously
     * pushed on the fileComponentStack argument.
     *
     * @param fileComponentStack stack containing pushed file components
     * @return the assembled destination file
     */
    private File assembleDestinationFile( Deque<String> fileComponentStack )
    {
        File destinationFile = outputDirectory.getAbsoluteFile();

        while ( !fileComponentStack.isEmpty() )
        {
            destinationFile = new File( destinationFile, fileComponentStack.pop() );
        }

        return destinationFile;
    }


    /**
     * Calculates the destination file for a schema LDIF resource.
     *
     * @param resource the source file
     * @return the destination file
     */
    private File getDestinationFile( File resource )
    {
        File parent = resource.getParentFile();
        Deque<String> fileComponentStack = new ArrayDeque<>();
        fileComponentStack.push( resource.getName() );

        while ( parent != null )
        {
            if ( "schema".equals( parent.getName() ) )
            {
                // All LDIF files besides the schema.ldif are under the
                // schema/schema base path. So we need to add one more
                // schema component to all LDIF files minus this schema.ldif
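                // For instance, a resource like .../schema/ou=schema/cn=core.ldif is
                // expected to end up under <outputDirectory>/schema/ou=schema/cn=core.ldif.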
                fileComponentStack.push( "schema" );

                return assembleDestinationFile( fileComponentStack );
            }

            fileComponentStack.push( parent.getName() );

            if ( parent.equals( parent.getParentFile() ) || parent.getParentFile() == null )
            {
                throw new IllegalStateException( I18n.err( I18n.ERR_16004_ROOT_WITHOUT_SCHEMA ) );
            }

            parent = parent.getParentFile();
        }

        throw new IllegalStateException( I18n.err( I18n.ERR_16005_PARENT_NULL ) );
    }


    /**
     * Gets the unique schema file resource from the class loader off the base path.  If
     * the same resource exists multiple times then an error will result since the resource
     * is not unique.
     *
     * @param resourceName the file name of the resource to load
     * @param resourceDescription human readable description of the resource
     * @return the InputStream to read the contents of the resource
     * @throws IOException if there are problems reading or finding a unique copy of the resource
     */
    public static InputStream getUniqueResourceAsStream( String resourceName, String resourceDescription )
        throws IOException
    {
        URL result = getUniqueResource( BASE_PATH + resourceName, resourceDescription );

        return result.openStream();
    }


    /**
     * Gets a unique resource from the class loader.
     *
     * @param resourceName the name of the resource
     * @param resourceDescription the description of the resource
     * @return the URL to the resource in the class loader
     * @throws IOException if there is an IO error
     */
    public static URL getUniqueResource( String resourceName, String resourceDescription ) throws IOException
    {
        Enumeration<URL> resources = DefaultSchemaLdifExtractor.class.getClassLoader().getResources( resourceName );

        if ( !resources.hasMoreElements() )
        {
            throw new UniqueResourceException( resourceName, resourceDescription );
        }

        URL result = resources.nextElement();

        if ( resources.hasMoreElements() )
        {
            throw new UniqueResourceException( resourceName, result, resources, resourceDescription );
        }

        return result;
    }


    /**
     * Extracts the LDIF schema resource from the class loader.
     *
     * @param resource the LDIF schema resource
     * @throws IOException if there are IO errors
     */
    private void extractFromClassLoader( String resource ) throws IOException
    {
        byte[] buf = new byte[512];

        try ( InputStream in = DefaultSchemaLdifExtractor.getUniqueResourceAsStream( resource,
            "LDIF file in schema repository" ) )
        {
            File destination = new File( outputDirectory, resource );

            /*
             * Do not overwrite an LDIF file if it has already been extracted.
             */
            if ( destination.exists() )
            {
                return;
            }

            if ( !destination.getParentFile().exists() && !destination.getParentFile().mkdirs() )
            {
                throw new IOException( I18n.err( I18n.ERR_16006_DIRECTORY_CREATION_FAILED, destination
                    .getParentFile().getAbsolutePath() ) );
            }

            try ( OutputStream out = Files.newOutputStream( Paths.get( destination.getPath() ) ) )
            {
                // Copy until end of stream: read() returns -1 at EOF, which is more
                // reliable than polling available(), which may return 0 before EOF.
                int readCount;

                while ( ( readCount = in.read( buf ) ) != -1 )
                {
                    out.write( buf, 0, readCount );
                }

                out.flush();
            }
        }
    }
}