Coverage Report - org.apache.maven.archiva.scheduled.executors.ArchivaRepositoryScanningTaskExecutor
 
Classes in this File                    Line Coverage   Branch Coverage   Complexity
ArchivaRepositoryScanningTaskExecutor   0% (0/45)       0% (0/10)         0
 
Line  Hits  Source
   1        package org.apache.maven.archiva.scheduled.executors;
   2
   3        /*
   4         * Licensed to the Apache Software Foundation (ASF) under one
   5         * or more contributor license agreements.  See the NOTICE file
   6         * distributed with this work for additional information
   7         * regarding copyright ownership.  The ASF licenses this file
   8         * to you under the Apache License, Version 2.0 (the
   9         * "License"); you may not use this file except in compliance
  10         * with the License.  You may obtain a copy of the License at
  11         *
  12         *  http://www.apache.org/licenses/LICENSE-2.0
  13         *
  14         * Unless required by applicable law or agreed to in writing,
  15         * software distributed under the License is distributed on an
  16         * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
  17         * KIND, either express or implied.  See the License for the
  18         * specific language governing permissions and limitations
  19         * under the License.
  20         */
  21
  22        import org.apache.commons.collections.CollectionUtils;
  23        import org.apache.commons.io.FileUtils;
  24        import org.apache.commons.lang.StringUtils;
  25        import org.apache.maven.archiva.configuration.ArchivaConfiguration;
  26        import org.apache.maven.archiva.configuration.ManagedRepositoryConfiguration;
  27        import org.apache.maven.archiva.database.ArchivaDAO;
  28        import org.apache.maven.archiva.database.constraints.ArtifactsByRepositoryConstraint;
  29        import org.apache.maven.archiva.database.constraints.MostRecentRepositoryScanStatistics;
  30        import org.apache.maven.archiva.database.constraints.UniqueArtifactIdConstraint;
  31        import org.apache.maven.archiva.database.constraints.UniqueGroupIdConstraint;
  32        import org.apache.maven.archiva.model.RepositoryContentStatistics;
  33        import org.apache.maven.archiva.repository.RepositoryException;
  34        import org.apache.maven.archiva.repository.scanner.RepositoryContentConsumers;
  35        import org.apache.maven.archiva.repository.scanner.RepositoryScanStatistics;
  36        import org.apache.maven.archiva.repository.scanner.RepositoryScanner;
  37        import org.apache.maven.archiva.scheduled.tasks.RepositoryTask;
  38        import org.codehaus.plexus.personality.plexus.lifecycle.phase.Initializable;
  39        import org.codehaus.plexus.personality.plexus.lifecycle.phase.InitializationException;
  40        import org.codehaus.plexus.taskqueue.Task;
  41        import org.codehaus.plexus.taskqueue.execution.TaskExecutionException;
  42        import org.codehaus.plexus.taskqueue.execution.TaskExecutor;
  43        import org.slf4j.Logger;
  44        import org.slf4j.LoggerFactory;
  45
  46        import java.io.File;
  47        import java.util.ArrayList;
  48        import java.util.List;
  49
  50        /**
  51         * ArchivaRepositoryScanningTaskExecutor
  52         *
  53         * @version $Id: ArchivaRepositoryScanningTaskExecutor.java 1043850 2010-12-09 07:58:00Z brett $
  54         * @plexus.component role="org.codehaus.plexus.taskqueue.execution.TaskExecutor"
  55         * role-hint="repository-scanning"
  56         */
  57     0  public class ArchivaRepositoryScanningTaskExecutor
  58            implements TaskExecutor, Initializable
  59        {
  60     0      private Logger log = LoggerFactory.getLogger( ArchivaRepositoryScanningTaskExecutor.class );
  61
  62            /**
  63             * @plexus.requirement role-hint="jdo"
  64             */
  65            private ArchivaDAO dao;
  66
  67            /**
  68             * @plexus.requirement
  69             */
  70            private ArchivaConfiguration archivaConfiguration;
  71
  72            /**
  73             * The repository scanner component.
  74             *
  75             * @plexus.requirement
  76             */
  77            private RepositoryScanner repoScanner;
  78
  79            /**
  80             * @plexus.requirement
  81             */
  82            private RepositoryContentConsumers consumers;
  83
  84            private Task task;
  85
  86            public void initialize()
  87                throws InitializationException
  88            {
  89     0          log.info( "Initialized " + this.getClass().getName() );
  90     0      }
  91
  92            @SuppressWarnings("unchecked")
  93            public void executeTask( Task task )
  94                throws TaskExecutionException
  95            {
  96     0          this.task = task;
  97
  98     0          RepositoryTask repoTask = (RepositoryTask) task;
  99
 100     0          if ( StringUtils.isBlank( repoTask.getRepositoryId() ) )
 101                {
 102     0              throw new TaskExecutionException( "Unable to execute RepositoryTask with blank repository Id." );
 103                }
 104
 105     0          ManagedRepositoryConfiguration arepo =
 106                    archivaConfiguration.getConfiguration().findManagedRepositoryById( repoTask.getRepositoryId() );
 107
 108                // execute consumers on resource file if set
 109     0          if ( repoTask.getResourceFile() != null )
 110                {
 111     0              log.debug( "Executing task from queue with job name: " + repoTask );
 112     0              consumers.executeConsumers( arepo, repoTask.getResourceFile(), repoTask.isUpdateRelatedArtifacts() );
 113                }
 114                else
 115                {
 116     0              log.info( "Executing task from queue with job name: " + repoTask );
 117
 118                    // otherwise, execute consumers on whole repository
 119                    try
 120                    {
 121     0                  if ( arepo == null )
 122                        {
 123     0                      throw new TaskExecutionException(
 124                                "Unable to execute RepositoryTask with invalid repository id: " + repoTask.getRepositoryId() );
 125                        }
 126
 127     0                  long sinceWhen = RepositoryScanner.FRESH_SCAN;
 128
 129     0                  List<RepositoryContentStatistics> results = (List<RepositoryContentStatistics>) dao.query(
 130                            new MostRecentRepositoryScanStatistics( arepo.getId() ) );
 131
 132     0                  if ( CollectionUtils.isNotEmpty( results ) )
 133                        {
 134     0                      RepositoryContentStatistics lastStats = results.get( 0 );
 135     0                      if ( !repoTask.isScanAll() )
 136                            {
 137     0                          sinceWhen = lastStats.getWhenGathered().getTime() - lastStats.getDuration();
 138                            }
 139                        }
 140
 141     0                  RepositoryScanStatistics stats = repoScanner.scan( arepo, sinceWhen );
 142
 143     0                  log.info( "Finished repository task: " + stats.toDump( arepo ) );
 144
 145     0                  RepositoryContentStatistics dbstats = constructRepositoryStatistics( arepo, stats );
 146
 147     0                  dao.getRepositoryContentStatisticsDAO().saveRepositoryContentStatistics( dbstats );
 148
 149     0                  this.task = null;
 150                    }
 151     0              catch ( RepositoryException e )
 152                    {
 153     0                  throw new TaskExecutionException( "Repository error when executing repository job.", e );
 154     0              }
 155                }
 156     0      }
 157
 158            @SuppressWarnings("unchecked")
 159            private RepositoryContentStatistics constructRepositoryStatistics( ManagedRepositoryConfiguration arepo,
 160                                                                               RepositoryScanStatistics stats )
 161            {
 162                // I hate jpox and modello <-- and so do I
 163     0          RepositoryContentStatistics dbstats = new RepositoryContentStatistics();
 164     0          dbstats.setDuration( stats.getDuration() );
 165     0          dbstats.setNewFileCount( stats.getNewFileCount() );
 166     0          dbstats.setRepositoryId( stats.getRepositoryId() );
 167     0          dbstats.setTotalFileCount( stats.getTotalFileCount() );
 168     0          dbstats.setWhenGathered( stats.getWhenGathered() );
 169
 170                // total artifact count
 171                // note that when gathered is the end of the scan, so we look for all those before that time
 172     0          dbstats.setTotalArtifactCount( dao.getArtifactDAO().countArtifacts(
 173                    new ArtifactsByRepositoryConstraint( arepo.getId(), stats.getWhenGathered(), "groupId", true ) ) );
 174
 175                // total repo size -- TODO: needs to exclude ignored files (eg .svn)
 176     0          long size = FileUtils.sizeOfDirectory( new File( arepo.getLocation() ) );
 177     0          dbstats.setTotalSize( size );
 178
 179                // total unique groups
 180     0          List<String> repos = new ArrayList<String>();
 181     0          repos.add( arepo.getId() );
 182
 183     0          dbstats.setTotalGroupCount( dao.count( new UniqueGroupIdConstraint( repos ) ) );
 184
 185     0          dbstats.setTotalProjectCount( dao.count( new UniqueArtifactIdConstraint( arepo.getId(), true ) ) );
 186
 187     0          return dbstats;
 188            }
 189
 190            public Task getCurrentTaskInExecution()
 191            {
 192     0          return task;
 193            }
 194        }