/*
 *  Licensed to the Apache Software Foundation (ASF) under one
 *  or more contributor license agreements.  See the NOTICE file
 *  distributed with this work for additional information
 *  regarding copyright ownership.  The ASF licenses this file
 *  to you under the Apache License, Version 2.0 (the
 *  "License"); you may not use this file except in compliance
 *  with the License.  You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 *  Unless required by applicable law or agreed to in writing,
 *  software distributed under the License is distributed on an
 *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 *  KIND, either express or implied.  See the License for the
 *  specific language governing permissions and limitations
 *  under the License.
 *
 */
package org.apache.directory.server.tools;


import java.io.File;
import java.io.FileWriter;
import java.io.PrintWriter;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

import javax.naming.NamingEnumeration;
import javax.naming.directory.Attribute;
import javax.naming.directory.Attributes;

import jdbm.helper.MRU;
import jdbm.recman.BaseRecordManager;
import jdbm.recman.CacheRecordManager;

import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options;
import org.apache.directory.server.core.DefaultDirectoryService;
import org.apache.directory.server.core.DirectoryService;
import org.apache.directory.server.core.partition.impl.btree.jdbm.JdbmIndex;
import org.apache.directory.server.core.partition.impl.btree.jdbm.JdbmMasterTable;
import org.apache.directory.server.i18n.I18n;
import org.apache.directory.server.xdbm.Tuple;
import org.apache.directory.shared.ldap.MultiException;
import org.apache.directory.shared.ldap.cursor.Cursor;
import org.apache.directory.shared.ldap.entry.ServerEntry;
import org.apache.directory.shared.ldap.exception.LdapConfigurationException;
import org.apache.directory.shared.ldap.ldif.LdifUtils;
import org.apache.directory.shared.ldap.schema.AttributeType;
import org.apache.directory.shared.ldap.schema.SchemaManager;
import org.apache.directory.shared.ldap.schema.UsageEnum;
import org.apache.directory.shared.ldap.schema.ldif.extractor.SchemaLdifExtractor;
import org.apache.directory.shared.ldap.schema.ldif.extractor.impl.DefaultSchemaLdifExtractor;
import org.apache.directory.shared.ldap.schema.loader.ldif.LdifSchemaLoader;
import org.apache.directory.shared.ldap.schema.manager.impl.DefaultSchemaManager;
import org.apache.directory.shared.ldap.util.Base64;
import org.apache.directory.shared.ldap.util.ExceptionUtils;


/**
 * Simple tool used to dump the contents of a jdbm based partition.
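 * <p>
 * A typical invocation might look like the sketch below; the launcher command
 * and the paths shown are illustrative assumptions, only the option letters
 * are defined by {@link #getOptions()}:
 * </p>
 * <pre>
 *   dump -i /opt/apacheds -p example -e userPassword -f example.ldif
 * </pre>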
 *
 * @author <a href="mailto:dev@directory.apache.org">Apache Directory Project</a>
 * @version $Rev: 493916 $
 */
public class DumpCommand extends ToolCommand
{
    private SchemaManager schemaManager;
    private Set<String> exclusions = new HashSet<String>();
    private boolean includeOperational = false;


    public DumpCommand()
    {
        super( "dump" );
    }


    private SchemaManager loadSchemaManager() throws Exception
    {
        // --------------------------------------------------------------------
        // Load the bootstrap schemas to start up the schema partition
        // --------------------------------------------------------------------

        // setup temporary loader and temp registry
        String workingDirectory = System.getProperty( "workingDirectory" );

        if ( workingDirectory == null )
        {
            String path = DumpCommand.class.getResource( "" ).getPath();
            int targetPos = path.indexOf( "target" );
            workingDirectory = path.substring( 0, targetPos + 6 );
        }

        File schemaRepository = new File( workingDirectory, "schema" );
        SchemaLdifExtractor extractor = new DefaultSchemaLdifExtractor( new File( workingDirectory ) );
        extractor.extractOrCopy();
        LdifSchemaLoader loader = new LdifSchemaLoader( schemaRepository );
        schemaManager = new DefaultSchemaManager( loader );
        schemaManager.loadAllEnabled();

        List<Throwable> errors = schemaManager.getErrors();

        if ( errors.size() != 0 )
        {
            throw new Exception( I18n.err( I18n.ERR_317, ExceptionUtils.printErrors( errors ) ) );
        }

        schemaManager.loadWithDeps( "collective" );

        errors = schemaManager.getErrors();

        if ( !errors.isEmpty() )
        {
            MultiException e = new MultiException();

            for ( Throwable t : errors )
            {
                e.addThrowable( t );
            }

            throw e;
        }

        // --------------------------------------------------------------------
        // Initialize schema partition or bomb out if we cannot find it on disk
        // --------------------------------------------------------------------

        // If not present then we need to abort
        File schemaDirectory = new File( getInstanceLayout().getPartitionsDir(), "schema" );

        if ( !schemaDirectory.exists() )
        {
            throw new LdapConfigurationException( I18n.err( I18n.ERR_697, schemaDirectory ) );
        }

        DirectoryService directoryService = new DefaultDirectoryService();
        //schemaPartition.init( directoryService );

        // --------------------------------------------------------------------
        // Initialize schema subsystem and reset registries
        // --------------------------------------------------------------------
        // PartitionSchemaLoader schemaLoader = new PartitionSchemaLoader( schemaPartition, registries );
        // schemaLoader.loadEnabled( globalRegistries );
        // SerializableComparator.setRegistry( globalRegistries.getComparatorRegistry() );
        return schemaManager;
    }

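    /**
     * Runs the dump: verifies the installation layout, loads the schema manager,
     * reads the command line options (output file, partitions, excluded
     * attributes, operational attribute inclusion) and dumps each requested
     * partition to the selected writer.
     */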
    public void execute( CommandLine cmdline ) throws Exception
    {
        getLayout().verifyInstallation();
        schemaManager = loadSchemaManager();

        includeOperational = cmdline.hasOption( 'o' );
        String[] partitions = cmdline.getOptionValues( 'p' );
        String outputFile = cmdline.getOptionValue( 'f' );
        PrintWriter out = null;

        String[] excludedAttributes = cmdline.getOptionValues( 'e' );

        if ( excludedAttributes != null )
        {
            for ( String attributeType : excludedAttributes )
            {
                AttributeType type = schemaManager.lookupAttributeTypeRegistry( attributeType );
                exclusions.add( type.getName() );
            }
        }

        if ( outputFile == null )
        {
            out = new PrintWriter( System.out );
        }
        else
        {
            out = new PrintWriter( new FileWriter( outputFile ) );
        }

        for ( String partition : partitions )
        {
            File partitionDirectory = new File( getInstanceLayout().getPartitionsDir(), partition );
            out.println( "\n\n" );
            dump( partitionDirectory, out );
        }
    }


    /**
     * Dumps the master table of a single JDBM partition as LDIF to the given
     * writer, one commented block per entry.
     */
    private void dump( File partitionDirectory, PrintWriter out ) throws Exception
    {
        if ( !partitionDirectory.exists() )
        {
            System.err.println( I18n.err( I18n.ERR_196, partitionDirectory ) );
            System.exit( 1 );
        }

        out.println( "# ========================================================================" );
        out.println( "# ApacheDS Tools Version: " + getVersion() );
        out.println( "# Partition Directory: " + partitionDirectory );
        out.println( "# ========================================================================\n\n" );

        String path = partitionDirectory.getPath() + File.separator + "master";
        BaseRecordManager base = new BaseRecordManager( path );
        base.disableTransactions();
        CacheRecordManager recMan = new CacheRecordManager( base, new MRU( 1000 ) );

        JdbmMasterTable<ServerEntry> master = new JdbmMasterTable<ServerEntry>( recMan, schemaManager );

        // The apacheUpdn index maps entry identifiers back to user provided DNs
        AttributeType attributeType = schemaManager.lookupAttributeTypeRegistry( "apacheUpdn" );
        JdbmIndex idIndex = new JdbmIndex();
        idIndex.setAttributeId( attributeType.getName() );
        idIndex.setWkDirPath( partitionDirectory );
        idIndex.setCacheSize( 1000 );
        idIndex.setNumDupLimit( 1000 );
        idIndex.init( schemaManager, attributeType, partitionDirectory );

        out.println( "#---------------------" );
        Cursor<Tuple<Long, ServerEntry>> list = master.cursor();
        StringBuffer buf = new StringBuffer();

        while ( list.next() )
        {
            Tuple<Long, ServerEntry> tuple = list.get();
            Long id = tuple.getKey();
            String dn = ( String ) idIndex.reverseLookup( id );
            Attributes entry = ( Attributes ) tuple.getValue();

            filterAttributes( dn, entry );

            buf.append( "# Entry: " ).append( id ).append( "\n#---------------------\n\n" );

            if ( !LdifUtils.isLDIFSafe( dn ) )
            {
                // If the DN isn't LDIF safe, it needs to be Base64 encoded.
                buf.append( "dn:: " ).append( new String( Base64.encode( dn.getBytes() ) ) );
            }
            else
            {
                buf.append( "dn: " ).append( dn );
            }

            buf.append( "\n" ).append( LdifUtils.convertToLdif( entry ) );

            // Peek ahead to see whether another entry follows, so a separator
            // can be emitted between entries, then step back so the outer loop
            // does not skip that entry.
            if ( list.next() )
            {
                buf.append( "\n\n#---------------------\n" );
                list.previous();
            }

            out.print( buf.toString() );
            out.flush();
            buf.setLength( 0 );
        }
    }

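    /**
     * Strips excluded attributes and, unless operational attributes were
     * requested, operational attributes from an entry before it is written.
     * Attributes unknown to the schema cannot be filtered and are reported
     * unless quiet mode is enabled.
     */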
    private void filterAttributes( String dn, Attributes entry ) throws Exception
    {
        List<String> toRemove = new ArrayList<String>();
        NamingEnumeration<? extends Attribute> attrs = entry.getAll();

        while ( attrs.hasMore() )
        {
            Attribute attr = attrs.next();

            if ( !schemaManager.getAttributeTypeRegistry().contains( attr.getID() ) )
            {
                if ( !isQuietEnabled() )
                {
                    System.out
                        .println( "# Cannot properly filter unrecognized attribute " + attr.getID() + " in " + dn );
                }

                continue;
            }

            AttributeType type = schemaManager.lookupAttributeTypeRegistry( attr.getID() );
            boolean isOperational = type.getUsage() != UsageEnum.USER_APPLICATIONS;

            if ( exclusions.contains( attr.getID() ) || ( isOperational && ( !includeOperational ) ) )
            {
                toRemove.add( attr.getID() );
            }
        }

        for ( String id : toRemove )
        {
            entry.remove( id );

            if ( isDebugEnabled() )
            {
                System.out.println( "# Excluding attribute " + id + " in " + dn );
            }
        }
    }


    public Options getOptions()
    {
        Options opts = new Options();
        Option op = new Option( "f", "file", true, "file to output the dump to" );
        op.setRequired( false );
        opts.addOption( op );
        op = new Option( "p", "partitions", true, "the partitions to dump" );
        op.setRequired( true );
        op.setValueSeparator( File.pathSeparatorChar );
        opts.addOption( op );
        op = new Option( "e", "excluded-attributes", true, "the attributes to exclude" );
        op.setRequired( false );
        op.setValueSeparator( File.pathSeparatorChar );
        opts.addOption( op );
        op = new Option( "o", "include-operational", false, "include operational attributes: defaults to false" );
        op.setRequired( false );
        opts.addOption( op );
        op = new Option( "i", "install-path", true, "path to apacheds installation directory" );
        op.setRequired( true );
        opts.addOption( op );
        return opts;
    }
}