首先要把apacheds-all-1.5.7.jar 抓下來
建立一個Class, MyActavitor.java
package com.embed.ldap.service;

/**
 * Spring-managed lifecycle bean for the bundle (wired in the context XML with
 * init-method="start" and destroy-method="stop"). Delegates to the embedded
 * ApacheDS server instance injected by Spring.
 */
public class MyActavitor {

    // Embedded ApacheDS 1.5.7 wrapper, injected via setEmbeddedAds().
    private EmbeddedADSVer157 embeddedAds;

    public void setEmbeddedAds(EmbeddedADSVer157 embeddedAds) {
        this.embeddedAds = embeddedAds;
    }

    /**
     * Starts the embedded LDAP server; called by Spring as the bean's init-method.
     *
     * @throws Exception if the LDAP server fails to start
     */
    public void start() throws Exception {
        System.out.println("Hello Spring OSGi World!! ");
        embeddedAds.startServer();
    }

    /**
     * Stops the embedded LDAP server; called by Spring as the bean's destroy-method.
     *
     * @throws Exception if the LDAP server fails to stop
     */
    public void stop() throws Exception {
        System.out.println("Goodbye Spring OSGi World!!");
        embeddedAds.stopServer();
    }
}
這個Example 從ApacheDS官網上給的,透過startServer()可以將LDAP Server打開
EmbeddedADSVer157.java
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package com.embed.ldap.service;

import java.io.File;
import java.util.HashSet;
import java.util.List;

import org.apache.directory.server.constants.ServerDNConstants;
import org.apache.directory.server.core.DefaultDirectoryService;
import org.apache.directory.server.core.DirectoryService;
import org.apache.directory.server.core.partition.Partition;
import org.apache.directory.server.core.partition.impl.btree.jdbm.JdbmIndex;
import org.apache.directory.server.core.partition.impl.btree.jdbm.JdbmPartition;
import org.apache.directory.server.core.partition.ldif.LdifPartition;
import org.apache.directory.server.core.schema.SchemaPartition;
import org.apache.directory.server.ldap.LdapServer;
import org.apache.directory.server.protocol.shared.transport.TcpTransport;
import org.apache.directory.server.xdbm.Index;
import org.apache.directory.shared.ldap.entry.Entry;
import org.apache.directory.shared.ldap.entry.ServerEntry;
import org.apache.directory.shared.ldap.exception.LdapException;
import org.apache.directory.shared.ldap.name.DN;
import org.apache.directory.shared.ldap.schema.SchemaManager;
import org.apache.directory.shared.ldap.schema.ldif.extractor.SchemaLdifExtractor;
import org.apache.directory.shared.ldap.schema.ldif.extractor.impl.DefaultSchemaLdifExtractor;
import org.apache.directory.shared.ldap.schema.loader.ldif.LdifSchemaLoader;
import org.apache.directory.shared.ldap.schema.manager.impl.DefaultSchemaManager;
import org.apache.directory.shared.ldap.schema.registries.SchemaLoader;
import org.springframework.beans.factory.annotation.Configurable;
import org.springframework.core.io.FileSystemResource;
import org.springframework.core.io.Resource;

import com.embed.ldap.service.internal.ContextResourceLoader;

/**
 * A simple example exposing how to embed Apache Directory Server version 1.5.7
 * into an application.
 *
 * Adapted from the ApacheDS project example; schema extraction is delegated to
 * {@link MyDefaultSchemaLdifExtractor} because the stock extractor cannot
 * resolve resources through an OSGi bundle classpath.
 *
 * @author <a href="mailto:dev@directory.apache.org">Apache Directory Project</a>
 * @version $Rev$, $Date$
 */
@Configurable(preConstruction = true)
public class EmbeddedADSVer157 {

    /** The directory service. */
    public DirectoryService service;

    /** The LDAP server (created lazily in startServer()). */
    private LdapServer server;

    /** Resolves classpath resources via the OSGi/Spring application context. */
    private ContextResourceLoader ctxRsLoader;

    /**
     * Add a new partition to the server.
     *
     * @param partitionId the partition Id
     * @param partitionDn the partition DN
     * @return the newly added partition
     * @throws Exception if the partition can't be added
     */
    private Partition addPartition(String partitionId, String partitionDn) throws Exception {
        JdbmPartition partition = new JdbmPartition();
        partition.setId(partitionId);
        partition.setPartitionDir(new File(service.getWorkingDirectory(), partitionId));
        partition.setSuffix(partitionDn);
        service.addPartition(partition);

        return partition;
    }

    /**
     * Add a new set of index on the given attributes.
     *
     * @param partition the partition on which we want to add index
     * @param attrs the list of attributes to index
     */
    private void addIndex(Partition partition, String... attrs) {
        HashSet<Index<?, ServerEntry, Long>> indexedAttributes =
            new HashSet<Index<?, ServerEntry, Long>>();

        for (String attribute : attrs) {
            indexedAttributes.add(new JdbmIndex<String, ServerEntry>(attribute));
        }

        ((JdbmPartition) partition).setIndexedAttributes(indexedAttributes);
    }

    /**
     * Initialize the schema manager and add the schema partition to the
     * directory service.
     *
     * @throws Exception if the schema LDIF files are not found on the classpath
     */
    private void initSchemaPartition() throws Exception {
        SchemaPartition schemaPartition = service.getSchemaService().getSchemaPartition();

        // Init the LdifPartition
        LdifPartition ldifPartition = new LdifPartition();
        String workingDirectory = service.getWorkingDirectory().getAbsolutePath();
        ldifPartition.setWorkingDirectory(workingDirectory + "/schema");

        // Extract the schema on disk (a brand new one) and load the registries.
        // MyDefaultSchemaLdifExtractor is used instead of the stock extractor
        // because the latter cannot see resources through the bundle classpath.
        File schemaRepository = new File(workingDirectory, "schema");
        SchemaLdifExtractor extractor = new MyDefaultSchemaLdifExtractor(new File(workingDirectory));
        extractor.extractOrCopy(true);

        schemaPartition.setWrappedPartition(ldifPartition);

        SchemaLoader loader = new LdifSchemaLoader(schemaRepository);
        SchemaManager schemaManager = new DefaultSchemaManager(loader);
        service.setSchemaManager(schemaManager);

        // We have to load the schema now, otherwise we won't be able
        // to initialize the Partitions, as we won't be able to parse
        // and normalize their suffix DN
        schemaManager.loadAllEnabled();

        schemaPartition.setSchemaManager(schemaManager);

        List<Throwable> errors = schemaManager.getErrors();

        if (errors.size() != 0) {
            throw new Exception("Schema load failed : " + errors);
        }
    }

    /**
     * Initialize the server. It creates the partition, adds the index, and
     * injects the context entries for the created partitions.
     *
     * @param workDir the directory to be used for storing the data
     * @throws Exception if there were some problems while initializing the system
     */
    private void initDirectoryService(File workDir) throws Exception {
        // Initialize the LDAP service
        service = new DefaultDirectoryService();
        service.setWorkingDirectory(workDir);

        // first load the schema
        initSchemaPartition();

        // then the system partition
        // this is a MANDATORY partition
        Partition systemPartition = addPartition("system", ServerDNConstants.SYSTEM_DN);
        service.setSystemPartition(systemPartition);

        // Disable the ChangeLog system
        service.getChangeLog().setEnabled(false);
        service.setDenormalizeOpAttrsEnabled(true);

        // Now we can create as many partitions as we need
        // Create some new partitions named 'foo', 'bar' and 'apache'.
        Partition fooPartition = addPartition("foo", "dc=foo,dc=com");
        Partition barPartition = addPartition("bar", "dc=bar,dc=com");
        Partition apachePartition = addPartition("apache", "dc=apache,dc=org");

        // Index some attributes on the apache partition
        addIndex(apachePartition, "objectClass", "ou", "uid");

        // And start the service
        service.startup();

        // Inject the foo root entry if it does not already exist
        try {
            service.getAdminSession().lookup(fooPartition.getSuffixDn());
        } catch (LdapException lnnfe) {
            DN dnFoo = new DN("dc=foo,dc=com");
            ServerEntry entryFoo = service.newEntry(dnFoo);
            entryFoo.add("objectClass", "top", "domain", "extensibleObject");
            entryFoo.add("dc", "foo");
            service.getAdminSession().add(entryFoo);
        }

        // Inject the bar root entry
        try {
            service.getAdminSession().lookup(barPartition.getSuffixDn());
        } catch (LdapException lnnfe) {
            DN dnBar = new DN("dc=bar,dc=com");
            ServerEntry entryBar = service.newEntry(dnBar);
            entryBar.add("objectClass", "top", "domain", "extensibleObject");
            entryBar.add("dc", "bar");
            service.getAdminSession().add(entryBar);
        }

        // Inject the apache root entry
        if (!service.getAdminSession().exists(apachePartition.getSuffixDn())) {
            DN dnApache = new DN("dc=Apache,dc=Org");
            ServerEntry entryApache = service.newEntry(dnApache);
            entryApache.add("objectClass", "top", "domain", "extensibleObject");
            entryApache.add("dc", "Apache");
            service.getAdminSession().add(entryApache);
        }

        // We are all done !
    }

    /**
     * Creates a new instance of EmbeddedADS. It initializes the directory
     * service.
     *
     * NOTE(review): the work directory deliberately lives under java.io.tmpdir
     * (not under the bundle) because ApacheDS's ResourceMap scans
     * System.getProperty("java.class.path") when locating schema LDIF
     * resources, and an OSGi bundle classpath is not visible there.
     *
     * @param filePath classpath location of the prepared server-work resources
     * @param ctxRsLoader context-based resource loader, injected by Spring
     * @throws Exception if something went wrong
     */
    public EmbeddedADSVer157(String filePath, ContextResourceLoader ctxRsLoader) throws Exception {
        this.ctxRsLoader = ctxRsLoader;

        // Resolve the bundled resources through the application context
        // (plain file access does not work inside an OSGi bundle).
        File rs = ctxRsLoader.getResourceFile(filePath);

        File workDir = new File(System.getProperty("java.io.tmpdir") + "/server-work");
        initDirectoryService(workDir);
    }

    /**
     * Starts the LdapServer on port 10389.
     *
     * @throws Exception if the sanity lookup fails or the server can't start
     */
    public void startServer() throws Exception {
        // Sanity check: fails fast if the apache partition was not initialized.
        this.service.getAdminSession().lookup(new DN("dc=apache,dc=org"));

        server = new LdapServer();
        int serverPort = 10389;
        server.setTransports(new TcpTransport(serverPort));
        server.setDirectoryService(service);
        server.start();
    }

    /** Stops the LdapServer if it was started. */
    public void stopServer() {
        // Guard against stop() being called when startServer() never succeeded.
        if (server != null) {
            server.stop();
        }
    }
}
建立一個MyDefaultSchemaLdifExtractor.java
package com.embed.ldap.service;

import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.FileWriter;
import java.io.IOException;
import java.io.InputStream;
import java.io.InvalidObjectException;
import java.net.URL;
import java.util.Enumeration;
import java.util.Map;
import java.util.Stack;
import java.util.UUID;
import java.util.Map.Entry;
import java.util.regex.Pattern;

import org.apache.directory.shared.i18n.I18n;
import org.apache.directory.shared.ldap.constants.SchemaConstants;
import org.apache.directory.shared.ldap.exception.LdapException;
import org.apache.directory.shared.ldap.ldif.LdapLdifException;
import org.apache.directory.shared.ldap.ldif.LdifEntry;
import org.apache.directory.shared.ldap.ldif.LdifReader;
import org.apache.directory.shared.ldap.schema.ldif.extractor.SchemaLdifExtractor;
import org.apache.directory.shared.ldap.schema.ldif.extractor.UniqueResourceException;
//import org.apache.directory.shared.ldap.schema.ldif.extractor.impl.ResourceMap;
import com.embed.ldap.service.ResourceMap;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Variant of ApacheDS's DefaultSchemaLdifExtractor that uses the local
 * {@link ResourceMap} so schema LDIF resources can be located when running
 * inside an OSGi bundle (the stock ResourceMap only scans java.class.path).
 */
public class MyDefaultSchemaLdifExtractor implements SchemaLdifExtractor {

    private static final String BASE_PATH = "";

    private static final String SCHEMA_SUBDIR = "schema";

    private static final Logger LOG = LoggerFactory.getLogger(MyDefaultSchemaLdifExtractor.class);

    /** True when the schema directory already exists on disk. */
    private boolean extracted;

    /** Root directory the schema tree is extracted under. */
    private File outputDirectory;

    /** outputDirectory/schema. */
    private File schemaDirectory;

    /**
     * Creates an extractor which deposits files into the specified output
     * directory.
     *
     * @param outputDirectory the directory where the schema root is extracted
     */
    public MyDefaultSchemaLdifExtractor(File outputDirectory) {
        LOG.debug("BASE_PATH set to {}, outputDirectory set to {}", BASE_PATH, outputDirectory);

        this.outputDirectory = outputDirectory;
        this.schemaDirectory = new File(outputDirectory, SCHEMA_SUBDIR);

        if (!outputDirectory.exists()) {
            LOG.debug("Creating output directory: {}", outputDirectory);
            if (!outputDirectory.mkdir()) {
                LOG.error("Failed to create outputDirectory: {}", outputDirectory);
            }
        } else {
            LOG.debug("Output directory exists: no need to create.");
        }

        if (!schemaDirectory.exists()) {
            LOG.info("Schema directory '{}' does NOT exist: extracted state set to false.",
                schemaDirectory);
            extracted = false;
        } else {
            LOG.info("Schema directory '{}' does exist: extracted state set to true.",
                schemaDirectory);
            extracted = true;
        }
    }

    /**
     * Gets whether or not schema folder has been created or not.
     *
     * @return true if schema folder has already been extracted.
     */
    public boolean isExtracted() {
        return extracted;
    }

    /**
     * Extracts the LDIF files from a Jar file or copies exploded LDIF resources.
     *
     * @param overwrite over write extracted structure if true, false otherwise
     * @throws IOException if schema already extracted and on IO errors
     */
    public void extractOrCopy(boolean overwrite) throws IOException {
        if (!outputDirectory.exists()) {
            outputDirectory.mkdir();
        }

        File schemaDirectory = new File(outputDirectory, SCHEMA_SUBDIR);

        if (!schemaDirectory.exists()) {
            schemaDirectory.mkdir();
        } else if (!overwrite) {
            throw new IOException(I18n.err(I18n.ERR_08001, schemaDirectory.getAbsolutePath()));
        }

        Pattern pattern = Pattern.compile(".*schema/ou=schema.*\\.ldif");
        // Value is true when the resource lives inside a jar, false when it is
        // an exploded file on disk.
        Map<String, Boolean> list = ResourceMap.getResources(pattern);

        for (Entry<String, Boolean> entry : list.entrySet()) {
            if (entry.getValue()) {
                extractFromJar(entry.getKey());
            } else {
                File resource = new File(entry.getKey());
                copyFile(resource, getDestinationFile(resource));
            }
        }
    }

    /**
     * Extracts the LDIF files from a Jar file or copies exploded LDIF
     * resources without overwriting the resources if the schema has
     * already been extracted.
     *
     * @throws IOException if schema already extracted and on IO errors
     */
    public void extractOrCopy() throws IOException {
        extractOrCopy(false);
    }

    /**
     * Copies a file line by line from the source file argument to the
     * destination file argument.
     *
     * @param source the source file to copy
     * @param destination the destination to copy the source to
     * @throws IOException if there are IO errors or the source does not exist
     */
    private void copyFile(File source, File destination) throws IOException {
        LOG.debug("copyFile(): source = {}, destination = {}", source, destination);

        if (!destination.getParentFile().exists()) {
            destination.getParentFile().mkdirs();
        }

        if (!source.getParentFile().exists()) {
            throw new FileNotFoundException(I18n.err(I18n.ERR_08002, source.getAbsolutePath()));
        }

        FileWriter out = new FileWriter(destination);

        try {
            LdifReader ldifReader = new LdifReader(source);
            LdifEntry ldifEntry = null;

            try {
                boolean first = true;

                while (ldifReader.hasNext()) {
                    if (first) {
                        ldifEntry = ldifReader.next();

                        if (ldifEntry.get(SchemaConstants.ENTRY_UUID_AT) == null) {
                            // No UUID, let's create one
                            UUID entryUuid = UUID.randomUUID();
                            ldifEntry.addAttribute(SchemaConstants.ENTRY_UUID_AT, entryUuid.toString());
                        }

                        first = false;
                    } else {
                        // throw an exception : we should not have more than one entry per schema ldif file
                        String msg = I18n.err(I18n.ERR_08003, source);
                        LOG.error(msg);
                        throw new InvalidObjectException(msg);
                    }
                }
            } finally {
                // Close the reader even when an exception escapes the loop.
                ldifReader.close();
            }

            // Add the version at the first line, to avoid a warning
            String ldifString = "version: 1\n" + ldifEntry.toString();

            out.write(ldifString);
            out.flush();
        } catch (LdapLdifException ne) {
            // throw an exception : we should not have more than one entry per schema ldif file
            String msg = I18n.err(I18n.ERR_08004, source, ne.getLocalizedMessage());
            LOG.error(msg);
            throw new InvalidObjectException(msg);
        } catch (LdapException ne) {
            // throw an exception : we should not have more than one entry per schema ldif file
            String msg = I18n.err(I18n.ERR_08004, source, ne.getLocalizedMessage());
            LOG.error(msg);
            throw new InvalidObjectException(msg);
        } finally {
            out.close();
        }
    }

    /**
     * Assembles the destination file by appending file components previously
     * pushed on the fileComponentStack argument.
     *
     * @param fileComponentStack stack containing pushed file components
     * @return the assembled destination file
     */
    private File assembleDestinationFile(Stack<String> fileComponentStack) {
        File destinationFile = outputDirectory.getAbsoluteFile();

        while (!fileComponentStack.isEmpty()) {
            destinationFile = new File(destinationFile, fileComponentStack.pop());
        }

        return destinationFile;
    }

    /**
     * Calculates the destination file.
     *
     * @param resource the source file
     * @return the destination file's parent directory
     */
    private File getDestinationFile(File resource) {
        File parent = resource.getParentFile();
        Stack<String> fileComponentStack = new Stack<String>();
        fileComponentStack.push(resource.getName());

        while (parent != null) {
            if (parent.getName().equals("schema")) {
                // All LDIF files besides the schema.ldif are under the
                // schema/schema base path. So we need to add one more
                // schema component to all LDIF files minus this schema.ldif
                fileComponentStack.push("schema");

                return assembleDestinationFile(fileComponentStack);
            }

            fileComponentStack.push(parent.getName());

            if (parent.equals(parent.getParentFile()) || parent.getParentFile() == null) {
                throw new IllegalStateException(I18n.err(I18n.ERR_08005));
            }

            parent = parent.getParentFile();
        }

        // The loop only exits via return or exception; reaching here means the
        // resource path never contained a "schema" component.
        throw new IllegalStateException(I18n.err(I18n.ERR_08006));
    }

    /**
     * Gets the DBFILE resource from within a jar off the base path. If another jar
     * with such a DBFILE resource exists then an error will result since the resource
     * is not unique across all the jars.
     *
     * @param resourceName the file name of the resource to load
     * @param resourceDescription human description of the resource
     * @return the InputStream to read the contents of the resource
     * @throws IOException if there are problems reading or finding a unique copy of the resource
     */
    public static InputStream getUniqueResourceAsStream(String resourceName,
        String resourceDescription) throws IOException {
        resourceName = BASE_PATH + resourceName;
        URL result = getUniqueResource(resourceName, resourceDescription);
        return result.openStream();
    }

    /**
     * Gets a unique resource from a Jar file.
     *
     * @param resourceName the name of the resource
     * @param resourceDescription the description of the resource
     * @return the URL to the resource in the Jar file
     * @throws IOException if there is an IO error
     */
    public static URL getUniqueResource(String resourceName, String resourceDescription)
        throws IOException {
        Enumeration<URL> resources =
            MyDefaultSchemaLdifExtractor.class.getClassLoader().getResources(resourceName);

        if (!resources.hasMoreElements()) {
            throw new UniqueResourceException(resourceName, resourceDescription);
        }

        URL result = resources.nextElement();

        if (resources.hasMoreElements()) {
            throw new UniqueResourceException(resourceName, result, resources, resourceDescription);
        }

        return result;
    }

    /**
     * Extracts the LDIF schema resource from a Jar.
     *
     * @param resource the LDIF schema resource
     * @throws IOException if there are IO errors
     */
    private void extractFromJar(String resource) throws IOException {
        byte[] buf = new byte[512];
        InputStream in = MyDefaultSchemaLdifExtractor.getUniqueResourceAsStream(resource,
            "LDIF file in schema repository");

        try {
            File destination = new File(outputDirectory, resource);

            /*
             * Do not overwrite an LDIF file if it has already been extracted.
             */
            if (destination.exists()) {
                return;
            }

            if (!destination.getParentFile().exists()) {
                destination.getParentFile().mkdirs();
            }

            FileOutputStream out = new FileOutputStream(destination);

            try {
                while (in.available() > 0) {
                    int readCount = in.read(buf);
                    out.write(buf, 0, readCount);
                }
                out.flush();
            } finally {
                out.close();
            }
        } finally {
            in.close();
        }
    }
}
然後在Bundle 的context-xml 設定相關的bean,init-method....
<?xml version="1.0" encoding="UTF-8"?>
<beans xmlns="http://www.springframework.org/schema/beans"
    xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
    xsi:schemaLocation="http://www.springframework.org/schema/beans
        http://www.springframework.org/schema/beans/spring-beans.xsd">

    <!-- regular spring configuration file defining the beans for this bundle.
         The configuration of OSGi definitions is kept in a separate configuration
         file so that this file can easily be used for integration testing outside
         of an OSGi environment -->

    <!-- Bundle lifecycle bean: start()/stop() start and stop the embedded LDAP server. -->
    <bean id="actavitor" class="com.embed.ldap.service.MyActavitor"
          init-method="start" destroy-method="stop">
        <property name="embeddedAds" ref="embeddedADS" />
    </bean>

    <bean id="ctxRsLoader" class="com.embed.ldap.service.internal.ContextResourceLoader" />

    <!-- Constructor is EmbeddedADSVer157(String filePath, ContextResourceLoader ctxRsLoader);
         explicit index attributes make the argument matching unambiguous instead of
         relying on by-type resolution. -->
    <bean id="embeddedADS" class="com.embed.ldap.service.EmbeddedADSVer157">
        <constructor-arg index="0" type="java.lang.String"
                         value="classpath:META-INF/ldap/server-work/" />
        <constructor-arg index="1" ref="ctxRsLoader" />
    </bean>
</beans>
再來設定Bundle的MANIFEST.MF檔
Manifest-Version: 1.0 Export-Package: com.embed.ldap.service;version="1.0.0";uses:="org.apac he.directory.server.core.partition", org.apache.directory.server.core;version="1.5.7", org.apache.directory.server.core.changelog;version="1.5.7", org.apache.directory.server.core.entry;version="1.5.7", org.apache.directory.server.core.partition;version="1.5.7", org.apache.directory.server.core.partition.impl.btree.jdbm;version="1.5.7", org.apache.directory.server.core.partition.ldif;version="1.5.7", org.apache.directory.server.core.schema;version="1.5.7", org.apache.directory.server.ldap;version="1.5.7", org.apache.directory.server.protocol.shared.transport;version="1.5.7", org.apache.directory.shared.ldap.entry, org.apache.directory.shared.ldap.exception, org.apache.directory.shared.ldap.name, org.apache.directory.shared.ldap.schema, org.apache.directory.shared.ldap.schema.ldif.extractor, org.apache.directory.shared.ldap.schema.ldif.extractor.impl, org.apache.directory.shared.ldap.schema.loader.ldif, org.apache.directory.shared.ldap.schema.manager.impl, org.apache.directory.shared.ldap.schema.registries Build-Jdk: 1.6.0_24 Built-By: momo Bundle-Version: 1.0.0 Tool: Bnd-1.15.0 Bnd-LastModified: 1302856707906 Bundle-Name: Spring OSGi Bundle Bundle-ManifestVersion: 2 Bundle-ClassPath: .,META-INF/lib/apacheds-all-1.5.7.jar Created-By: Apache Maven Bundle Plugin Bundle-SymbolicName: OSGiWebApp.embedLdap Import-Package: javax.naming, javax.naming.directory, javax.naming.ldap, javax.net.ssl, javax.security.auth.x500, javax.security.sasl, org.apache.directory.server.core;version="1.5.7", org.apache.directory.server.core.changelog;version="1.5.7", org.apache.directory.server.core.entry;version="1.5.7", org.apache.directory.server.core.partition;version="1.5.7", org.apache.directory.server.core.partition.impl.btree.jdbm;version="1.5.7", org.apache.directory.server.core.partition.ldif;version="1.5.7", org.apache.directory.server.core.schema;version="1.5.7", 
org.apache.directory.server.ldap;version="1.5.7", org.apache.directory.server.protocol.shared.transport;version="1.5.7", org.apache.directory.shared.ldap.entry, org.apache.directory.shared.ldap.exception, org.apache.directory.shared.ldap.name, org.apache.directory.shared.ldap.schema, org.apache.directory.shared.ldap.schema.ldif.extractor, org.apache.directory.shared.ldap.schema.ldif.extractor.impl, org.apache.directory.shared.ldap.schema.loader.ldif, org.apache.directory.shared.ldap.schema.manager.impl, org.apache.directory.shared.ldap.schema.registries, org.slf4j;version="1.6.1", org.springframework.beans, org.springframework.context, org.springframework.core.io
檔案結構如下
* 這邊有一個問題就是EmbeddedADSVer157.java 在constructor 裡會設定workDir
workDir就是LDAP 相關檔案建立的位置, schema會從jar裡面拿出來,
解法是先在java.io.tmpdir 路徑裡先把ApacheDS的schema都先從jar裡先拿出來放
這樣在server start時就不會再去找jar file, 因為Bundle 的classpath 是沒辦法直接拿到實體檔
必須透過Bundle Context 取Resource or Spring 的Application Context 取Resource
System.getProperty( "java.io.tmpdir" ) + "/server-work" 最主要是因為ApacheDS裡面的ResourceMap在getResources時取的classpath為
String classPath = System.getProperty( "java.class.path", "." ); 在Eclipse Virgo 的話會取到C:\Eclipse-Virgo-WebServer\lib ......
所以上面才會用tempdir的路徑,eclipse virgo裡是C:\Eclipse-Virgo-WebServer\work\tmp
不然就必須去修改相關getResource的class, 跟OSGi Bundle的Resource match
沒有留言:
張貼留言