package org.apache.maven.archiva.scheduled.executors;

/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *  http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

import java.io.File;
import java.net.URL;
import java.util.Calendar;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import javax.jdo.PersistenceManager;
import javax.jdo.PersistenceManagerFactory;

import org.apache.maven.archiva.configuration.ArchivaConfiguration;
import org.apache.maven.archiva.configuration.ManagedRepositoryConfiguration;
import org.apache.maven.archiva.database.ArchivaDAO;
import org.apache.maven.archiva.database.ArtifactDAO;
import org.apache.maven.archiva.database.constraints.ArtifactsProcessedConstraint;
import org.apache.maven.archiva.database.constraints.MostRecentRepositoryScanStatistics;
import org.apache.maven.archiva.model.ArchivaArtifact;
import org.apache.maven.archiva.model.RepositoryContentStatistics;
import org.apache.maven.archiva.scheduled.tasks.RepositoryTask;
import org.codehaus.plexus.jdo.DefaultConfigurableJdoFactory;
import org.codehaus.plexus.jdo.JdoFactory;
import org.codehaus.plexus.spring.PlexusInSpringTestCase;
import org.codehaus.plexus.taskqueue.execution.TaskExecutor;
import org.codehaus.plexus.util.FileUtils;
import org.jpox.SchemaTool;

/**
 * ArchivaRepositoryScanningTaskExecutorTest
 *
 * @version $Id: ArchivaRepositoryScanningTaskExecutorTest.java 887051 2009-12-04 04:07:12Z brett $
 */
public class ArchivaRepositoryScanningTaskExecutorTest
    extends PlexusInSpringTestCase
{
    private TaskExecutor taskExecutor;

    protected ArchivaDAO dao;

    private File repoDir;

    private static final String TEST_REPO_ID = "testRepo";

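    /**
     * Bootstraps an in-memory JDO store (JPOX over HSQLDB by default, overridable via the
     * <code>jdo.test.*</code> system properties), creates the schema from <code>package.jdo</code>,
     * and copies the default test repository into <code>target/default-repository</code> with file
     * timestamps pushed a year into the past, so each test starts from a known state.
     */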
    protected void setUp()
        throws Exception
    {
        super.setUp();

        DefaultConfigurableJdoFactory jdoFactory = (DefaultConfigurableJdoFactory) lookup( JdoFactory.ROLE, "archiva" );
        assertEquals( DefaultConfigurableJdoFactory.class.getName(), jdoFactory.getClass().getName() );

        jdoFactory.setPersistenceManagerFactoryClass( "org.jpox.PersistenceManagerFactoryImpl" );

        /* derby version
        File derbyDbDir = new File( "target/plexus-home/testdb" );
        if ( derbyDbDir.exists() )
        {
            FileUtils.deleteDirectory( derbyDbDir );
        }

        jdoFactory.setDriverName( System.getProperty( "jdo.test.driver", "org.apache.derby.jdbc.EmbeddedDriver" ) );
        jdoFactory.setUrl( System.getProperty( "jdo.test.url", "jdbc:derby:" + derbyDbDir.getAbsolutePath() + ";create=true" ) );
        */

        jdoFactory.setDriverName( System.getProperty( "jdo.test.driver", "org.hsqldb.jdbcDriver" ) );
        jdoFactory.setUrl( System.getProperty( "jdo.test.url", "jdbc:hsqldb:mem:" + getName() ) );

        jdoFactory.setUserName( System.getProperty( "jdo.test.user", "sa" ) );

        jdoFactory.setPassword( System.getProperty( "jdo.test.pass", "" ) );

        jdoFactory.setProperty( "org.jpox.transactionIsolation", "READ_COMMITTED" );

        jdoFactory.setProperty( "org.jpox.poid.transactionIsolation", "READ_COMMITTED" );

        jdoFactory.setProperty( "org.jpox.autoCreateSchema", "true" );

        jdoFactory.setProperty( "javax.jdo.option.RetainValues", "true" );

        jdoFactory.setProperty( "javax.jdo.option.RestoreValues", "true" );

        // jdoFactory.setProperty( "org.jpox.autoCreateColumns", "true" );

        jdoFactory.setProperty( "org.jpox.validateTables", "true" );

        jdoFactory.setProperty( "org.jpox.validateColumns", "true" );

        jdoFactory.setProperty( "org.jpox.validateConstraints", "true" );

        Properties properties = jdoFactory.getProperties();

        for ( Map.Entry<Object, Object> entry : properties.entrySet() )
        {
            System.setProperty( (String) entry.getKey(), (String) entry.getValue() );
        }

        URL jdoFileUrls[] = new URL[]{getClass().getResource( "/org/apache/maven/archiva/model/package.jdo" )};

        if ( ( jdoFileUrls == null ) || ( jdoFileUrls[0] == null ) )
        {
            fail( "Unable to process test " + getName() + " - missing package.jdo." );
        }

        File propsFile = null; // intentional
        boolean verbose = true;

        SchemaTool.deleteSchemaTables( jdoFileUrls, new URL[]{}, propsFile, verbose );
        SchemaTool.createSchemaTables( jdoFileUrls, new URL[]{}, propsFile, verbose, null );

        PersistenceManagerFactory pmf = jdoFactory.getPersistenceManagerFactory();

        assertNotNull( pmf );

        PersistenceManager pm = pmf.getPersistenceManager();

        pm.close();

        this.dao = (ArchivaDAO) lookup( ArchivaDAO.class.getName(), "jdo" );

        taskExecutor = (TaskExecutor) lookup( TaskExecutor.class, "test-repository-scanning" );

        File sourceRepoDir = new File( getBasedir(), "src/test/repositories/default-repository" );
        repoDir = new File( getBasedir(), "target/default-repository" );

        FileUtils.deleteDirectory( repoDir );
        assertFalse( "Default Test Repository should not exist.", repoDir.exists() );

        repoDir.mkdir();

        FileUtils.copyDirectoryStructure( sourceRepoDir, repoDir );
        // set the timestamps to a time well in the past
        Calendar cal = Calendar.getInstance();
        cal.add( Calendar.YEAR, -1 );
        for ( File f : (List<File>) FileUtils.getFiles( repoDir, "**", null ) )
        {
            f.setLastModified( cal.getTimeInMillis() );
        }
        for ( String dir : (List<String>) FileUtils.getDirectoryNames( repoDir, "**/.svn", null, false ) )
        {
            FileUtils.deleteDirectory( new File( repoDir, dir ) );
        }

        assertTrue( "Default Test Repository should exist.", repoDir.exists() && repoDir.isDirectory() );

        ArchivaConfiguration archivaConfig = (ArchivaConfiguration) lookup( ArchivaConfiguration.class );
        assertNotNull( archivaConfig );

        // create the test managed repository configuration and register it as the only repository
        ManagedRepositoryConfiguration repositoryConfiguration = new ManagedRepositoryConfiguration();
        repositoryConfiguration.setId( TEST_REPO_ID );
        repositoryConfiguration.setName( "Test Repository" );
        repositoryConfiguration.setLocation( repoDir.getAbsolutePath() );
        archivaConfig.getConfiguration().getManagedRepositories().clear();
        archivaConfig.getConfiguration().addManagedRepository( repositoryConfiguration );
    }

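    /**
     * Removes the working copy of the test repository created by {@link #setUp()}.
     */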
    protected void tearDown()
        throws Exception
    {
        FileUtils.deleteDirectory( repoDir );

        assertFalse( repoDir.exists() );

        super.tearDown();
    }

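    /**
     * A plain scan of the freshly copied repository should record all eight artifacts as
     * unprocessed in the database.
     */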
    public void testExecutor()
        throws Exception
    {
        RepositoryTask repoTask = new RepositoryTask();

        repoTask.setRepositoryId( TEST_REPO_ID );

        taskExecutor.executeTask( repoTask );

        ArtifactDAO adao = dao.getArtifactDAO();
        List<ArchivaArtifact> unprocessedResultList = adao.queryArtifacts( new ArtifactsProcessedConstraint( false ) );

        assertNotNull( unprocessedResultList );
        assertEquals( "Incorrect number of unprocessed artifacts detected.", 8, unprocessedResultList.size() );
    }

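    /**
     * With scanAll disabled and previous scan statistics already recorded, a scan should find
     * nothing new; after additional artifacts are copied in with fresh timestamps, a second scan
     * should detect only the new artifact and update the repository statistics accordingly.
     */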
    public void testExecutorScanOnlyNewArtifacts()
        throws Exception
    {
        RepositoryTask repoTask = new RepositoryTask();

        repoTask.setRepositoryId( TEST_REPO_ID );
        repoTask.setScanAll( false );

        createAndSaveTestStats();

        taskExecutor.executeTask( repoTask );

        // check no artifacts processed
        ArtifactDAO adao = dao.getArtifactDAO();
        List<ArchivaArtifact> unprocessedResultList = adao.queryArtifacts( new ArtifactsProcessedConstraint( false ) );

        assertNotNull( unprocessedResultList );
        assertEquals( "Incorrect number of unprocessed artifacts detected. No new artifacts should have been found.", 0,
                      unprocessedResultList.size() );

        // check correctness of new stats
        List<RepositoryContentStatistics> results =
            (List<RepositoryContentStatistics>) dao.query( new MostRecentRepositoryScanStatistics( TEST_REPO_ID ) );
        RepositoryContentStatistics newStats = results.get( 0 );
        assertEquals( 0, newStats.getNewFileCount() );
        assertEquals( TEST_REPO_ID, newStats.getRepositoryId() );
        assertEquals( 31, newStats.getTotalFileCount() );
        // TODO: can't test these as they weren't stored in the database
//        assertEquals( 8, newStats.getTotalArtifactCount() );
//        assertEquals( 3, newStats.getTotalGroupCount() );
//        assertEquals( 5, newStats.getTotalProjectCount() );
        assertEquals( 14159, newStats.getTotalSize() );

        File newArtifactGroup = new File( repoDir, "org/apache/archiva" );

        FileUtils.copyDirectoryStructure( new File( getBasedir(), "target/test-classes/test-repo/org/apache/archiva" ),
                                          newArtifactGroup );

        // update last modified date
        new File( newArtifactGroup, "archiva-index-methods-jar-test/1.0/pom.xml" ).setLastModified(
            Calendar.getInstance().getTimeInMillis() + 1000 );
        new File( newArtifactGroup,
                  "archiva-index-methods-jar-test/1.0/archiva-index-methods-jar-test-1.0.jar" ).setLastModified(
            Calendar.getInstance().getTimeInMillis() + 1000 );

        assertTrue( newArtifactGroup.exists() );

        taskExecutor.executeTask( repoTask );

        unprocessedResultList = adao.queryArtifacts( new ArtifactsProcessedConstraint( false ) );
        assertNotNull( unprocessedResultList );
        assertEquals( "Incorrect number of unprocessed artifacts detected. One new artifact should have been found.", 1,
                      unprocessedResultList.size() );

        // check correctness of new stats
        results =
            (List<RepositoryContentStatistics>) dao.query( new MostRecentRepositoryScanStatistics( TEST_REPO_ID ) );
        RepositoryContentStatistics updatedStats = results.get( 0 );
        assertEquals( 2, updatedStats.getNewFileCount() );
        assertEquals( TEST_REPO_ID, updatedStats.getRepositoryId() );
        assertEquals( 33, updatedStats.getTotalFileCount() );
        // TODO: can't test these as they weren't stored in the database
//        assertEquals( 8, newStats.getTotalArtifactCount() );
//        assertEquals( 3, newStats.getTotalGroupCount() );
//        assertEquals( 5, newStats.getTotalProjectCount() );
        assertEquals( 19301, updatedStats.getTotalSize() );
    }

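    /**
     * Artifacts added after the previous scan, with modification times just ahead of "now",
     * should be picked up by an incremental (scanAll = false) scan.
     */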
    public void testExecutorScanOnlyNewArtifactsChangeTimes()
        throws Exception
    {
        RepositoryTask repoTask = new RepositoryTask();

        repoTask.setRepositoryId( TEST_REPO_ID );
        repoTask.setScanAll( false );

        createAndSaveTestStats();

        File newArtifactGroup = new File( repoDir, "org/apache/archiva" );

        FileUtils.copyDirectoryStructure( new File( getBasedir(), "target/test-classes/test-repo/org/apache/archiva" ),
                                          newArtifactGroup );

        // update last modified date, placing shortly after last scan
        new File( newArtifactGroup, "archiva-index-methods-jar-test/1.0/pom.xml" ).setLastModified(
            Calendar.getInstance().getTimeInMillis() + 1000 );
        new File( newArtifactGroup,
                  "archiva-index-methods-jar-test/1.0/archiva-index-methods-jar-test-1.0.jar" ).setLastModified(
            Calendar.getInstance().getTimeInMillis() + 1000 );

        assertTrue( newArtifactGroup.exists() );

        // scan using the really long previous duration
        taskExecutor.executeTask( repoTask );

        // check that only the new artifact was detected
        ArtifactDAO adao = dao.getArtifactDAO();
        List<ArchivaArtifact> unprocessedResultList = adao.queryArtifacts( new ArtifactsProcessedConstraint( false ) );
        assertNotNull( unprocessedResultList );
        assertEquals( "Incorrect number of unprocessed artifacts detected. One new artifact should have been found.", 1,
                      unprocessedResultList.size() );

        // check correctness of new stats
        List<RepositoryContentStatistics> results =
            (List<RepositoryContentStatistics>) dao.query( new MostRecentRepositoryScanStatistics( TEST_REPO_ID ) );
        RepositoryContentStatistics newStats = results.get( 0 );
        assertEquals( 2, newStats.getNewFileCount() );
        assertEquals( TEST_REPO_ID, newStats.getRepositoryId() );
        assertEquals( 33, newStats.getTotalFileCount() );
        // TODO: can't test these as they weren't stored in the database
//        assertEquals( 8, newStats.getTotalArtifactCount() );
//        assertEquals( 3, newStats.getTotalGroupCount() );
//        assertEquals( 5, newStats.getTotalProjectCount() );
        assertEquals( 19301, newStats.getTotalSize() );
    }

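    /**
     * Artifacts whose modification times fall within the window of the previous (long-running)
     * scan should still be picked up by an incremental scan.
     */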
    public void testExecutorScanOnlyNewArtifactsMidScan()
        throws Exception
    {
        RepositoryTask repoTask = new RepositoryTask();

        repoTask.setRepositoryId( TEST_REPO_ID );
        repoTask.setScanAll( false );

        createAndSaveTestStats();

        File newArtifactGroup = new File( repoDir, "org/apache/archiva" );

        FileUtils.copyDirectoryStructure( new File( getBasedir(), "target/test-classes/test-repo/org/apache/archiva" ),
                                          newArtifactGroup );

        // update last modified date, placing in middle of last scan
        new File( newArtifactGroup, "archiva-index-methods-jar-test/1.0/pom.xml" ).setLastModified(
            Calendar.getInstance().getTimeInMillis() - 50000 );
        new File( newArtifactGroup,
                  "archiva-index-methods-jar-test/1.0/archiva-index-methods-jar-test-1.0.jar" ).setLastModified(
            Calendar.getInstance().getTimeInMillis() - 50000 );

        assertTrue( newArtifactGroup.exists() );

        // scan using the really long previous duration
        taskExecutor.executeTask( repoTask );

        // check that only the new artifact was detected
        ArtifactDAO adao = dao.getArtifactDAO();
        List<ArchivaArtifact> unprocessedResultList = adao.queryArtifacts( new ArtifactsProcessedConstraint( false ) );
        assertNotNull( unprocessedResultList );
        assertEquals( "Incorrect number of unprocessed artifacts detected. One new artifact should have been found.", 1,
                      unprocessedResultList.size() );

        // check correctness of new stats
        List<RepositoryContentStatistics> results =
            (List<RepositoryContentStatistics>) dao.query( new MostRecentRepositoryScanStatistics( TEST_REPO_ID ) );
        RepositoryContentStatistics newStats = results.get( 0 );
        assertEquals( 2, newStats.getNewFileCount() );
        assertEquals( TEST_REPO_ID, newStats.getRepositoryId() );
        assertEquals( 33, newStats.getTotalFileCount() );
        // TODO: can't test these as they weren't stored in the database
//        assertEquals( 8, newStats.getTotalArtifactCount() );
//        assertEquals( 3, newStats.getTotalGroupCount() );
//        assertEquals( 5, newStats.getTotalProjectCount() );
        assertEquals( 19301, newStats.getTotalSize() );
    }

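    /**
     * Records a baseline scan-statistics row for {@link #TEST_REPO_ID} so that incremental scans
     * have a "previous scan" to compare against.
     */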
    private void createAndSaveTestStats()
    {
        RepositoryContentStatistics stats = new RepositoryContentStatistics();
        stats.setDuration( 1234567 );
        stats.setNewFileCount( 31 );
        stats.setRepositoryId( TEST_REPO_ID );
        stats.setTotalArtifactCount( 8 );
        stats.setTotalFileCount( 31 );
        stats.setTotalGroupCount( 3 );
        stats.setTotalProjectCount( 5 );
        stats.setTotalSize( 14159 );
        stats.setWhenGathered( Calendar.getInstance().getTime() );

        dao.getRepositoryContentStatisticsDAO().saveRepositoryContentStatistics( stats );
    }

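    /**
     * With scanAll forced on, existing scan statistics are ignored and all eight artifacts are
     * reported as unprocessed.
     */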
    public void testExecutorForceScanAll()
        throws Exception
    {
        RepositoryTask repoTask = new RepositoryTask();

        repoTask.setRepositoryId( TEST_REPO_ID );
        repoTask.setScanAll( true );

        RepositoryContentStatistics stats = new RepositoryContentStatistics();
        stats.setDuration( 1234567 );
        stats.setNewFileCount( 8 );
        stats.setRepositoryId( TEST_REPO_ID );
        stats.setTotalArtifactCount( 8 );
        stats.setTotalFileCount( 8 );
        stats.setTotalGroupCount( 3 );
        stats.setTotalProjectCount( 5 );
        stats.setTotalSize( 999999 );
        stats.setWhenGathered( Calendar.getInstance().getTime() );

        dao.getRepositoryContentStatisticsDAO().saveRepositoryContentStatistics( stats );

        taskExecutor.executeTask( repoTask );

        ArtifactDAO adao = dao.getArtifactDAO();
        List<ArchivaArtifact> unprocessedResultList = adao.queryArtifacts( new ArtifactsProcessedConstraint( false ) );

        assertNotNull( unprocessedResultList );
        assertEquals( "Incorrect number of unprocessed artifacts detected.", 8, unprocessedResultList.size() );
    }
}