Package com.bazaarvoice.maven.plugin.s3repo

Examples of com.bazaarvoice.maven.plugin.s3repo.S3RepositoryPath



    /** Return list of repo-relative file paths. */
    private List<String> internalListRepository(ListContext context) throws MojoExecutionException {
        List<String> list = Lists.newArrayList();
        S3RepositoryPath s3RepositoryPath = context.getS3RepositoryPath();
        Set<String> filesListedInMetadata = Sets.newHashSet(); // will remain empty if filterByMetadata = false
        if (filterByMetadata) {
            // assert: metadata is downloaded, so we can:
            filesListedInMetadata.addAll(context.getLocalYumRepo().parseFileListFromRepoMetadata());
            getLog().debug("files listed in metadata = " + filesListedInMetadata);
        }
        // note: filesListedInMetadata are **repo-relative** file paths.
        ListObjectsRequest listObjectsRequest = new ListObjectsRequest()
            .withBucketName(s3RepositoryPath.getBucketName());
        String prefix = ""; // capture prefix for debug logging
        if (s3RepositoryPath.hasBucketRelativeFolder()) {
            prefix = s3RepositoryPath.getBucketRelativeFolder() + "/";
            listObjectsRequest.withPrefix(prefix);
        }
        List<S3ObjectSummary> result = S3Utils.listAllObjects(context.getS3Session(), listObjectsRequest);
        for (S3ObjectSummary summary : result) {
            if (summary.getKey().endsWith("/")) {
                getLog().debug("Will not list " + summary.getKey() + ", it's a folder");
                continue;
            }
            if (isMetadataFile(summary, s3RepositoryPath)) {
                getLog().debug("Will not list " + summary.getKey() + ", it's a metadata file");
                continue;
            }
            String asRepoRelativeFile =
                s3RepositoryPath.hasBucketRelativeFolder()
                    ? summary.getKey().replaceFirst("^\\Q" + s3RepositoryPath.getBucketRelativeFolder() + "/\\E", "")
                    : summary.getKey();
            if (filterByMetadata && !filesListedInMetadata.contains(asRepoRelativeFile)) {
                getLog().debug("Not known to metadata: " + summary.getKey() + " (repo-relative: " + asRepoRelativeFile + ")");
                continue; // filterByMetadata is enabled, so skip files the YUM metadata does not declare
            }
            // Assert: summary.getKey() is a file that exists as a file in the S3 repo AND
View Full Code Here
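
The listing above skips keys that isMetadataFile identifies as YUM metadata. A minimal sketch of what such a check might do, assuming metadata is anything under the repository's repodata/ folder (this implementation is a guess for illustration, not the plugin's own):

// Hedged sketch; the plugin's actual isMetadataFile is not shown on this page.
private static boolean isMetadataFile(S3ObjectSummary summary, S3RepositoryPath repo) {
    // Assumption: metadata files live under <bucket-relative-folder>/repodata/.
    String repodataPrefix = repo.hasBucketRelativeFolder()
            ? repo.getBucketRelativeFolder() + "/" + WellKnowns.YUM_REPODATA_FOLDERNAME + "/"
            : WellKnowns.YUM_REPODATA_FOLDERNAME + "/";
    return summary.getKey().startsWith(repodataPrefix);
}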


    private void maybeDownloadRepositoryMetadata(ListContext context) throws MojoExecutionException {
        if (!filterByMetadata) {
            getLog().info("Will not filter file list using YUM metadata.");
            return;
        }
        S3RepositoryPath s3RepositoryPath = context.getS3RepositoryPath();
        ListObjectsRequest listObjectsRequest = new ListObjectsRequest()
            .withBucketName(s3RepositoryPath.getBucketName());
        String prefix = ""; // capture prefix for debug logging
        if (s3RepositoryPath.hasBucketRelativeFolder()) {
            prefix = s3RepositoryPath.getBucketRelativeFolder() + "/" + WellKnowns.YUM_REPODATA_FOLDERNAME + "/";
            listObjectsRequest.withPrefix(prefix);
        }
        List<S3ObjectSummary> result = S3Utils.listAllObjects(context.getS3Session(), listObjectsRequest);
        for (S3ObjectSummary summary : result) {
            if (summary.getKey().endsWith("/")) {
                getLog().debug("Will not list " + summary.getKey() + ", it's a folder");
                continue;
            }
            getLog().info("Downloading metadata file '" + summary.getKey() + "' from S3...");
            final S3Object object = context.getS3Session()
                .getObject(new GetObjectRequest(s3RepositoryPath.getBucketName(), summary.getKey()));
            try {
                File targetFile =
                    new File(stagingDirectory, /*assume object key is bucket-relative path to filename with extension*/summary.getKey());
                Files.createParentDirs(targetFile);
                FileUtils.copyStreamToFile(new InputStreamFacade() {
View Full Code Here

        }
    }
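
The maybeDownloadRepositoryMetadata snippet is truncated at the copyStreamToFile call. A hedged sketch of how that call is typically completed with plexus-utils, assuming the facade simply hands back the S3 object's content stream (an assumption about the elided code, not the plugin's verbatim source):

// Assumed imports: org.codehaus.plexus.util.FileUtils,
// org.codehaus.plexus.util.io.InputStreamFacade, java.io.InputStream, java.io.IOException.
FileUtils.copyStreamToFile(new InputStreamFacade() {
    @Override
    public InputStream getInputStream() throws IOException {
        // Stream the S3 object body straight into the local target file.
        return object.getObjectContent();
    }
}, targetFile);

The surrounding try block presumably closes the S3Object (and with it the underlying HTTP connection) in a finally clause once the copy completes.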

    private S3RepositoryPath parseS3RepositoryPath() throws MojoExecutionException {
        try {
            S3RepositoryPath parsed = S3RepositoryPath.parse(s3RepositoryPath);
            if (parsed.hasBucketRelativeFolder()) {
                getLog().info("Using bucket '" + parsed.getBucketName() + "' and folder '" + parsed.getBucketRelativeFolder() + "' as repository...");
            } else {
                getLog().info("Using bucket '" + parsed.getBucketName() + "' as repository...");
            }
            return parsed;
        } catch (Exception e) {
            throw new MojoExecutionException("Failed to parse S3 repository path: " + s3RepositoryPath, e);
        }
View Full Code Here
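
For context, parse() turns the configured s3RepositoryPath string into a bucket name plus an optional bucket-relative folder. An illustrative use, with the path format assumed rather than taken from the plugin's documentation:

// The path format here is an assumption for illustration; the authoritative
// grammar is whatever S3RepositoryPath.parse accepts.
S3RepositoryPath repo = S3RepositoryPath.parse("/my-bucket/centos/6/noarch");
String bucket = repo.getBucketName();               // expected: "my-bucket"
if (repo.hasBucketRelativeFolder()) {
    String folder = repo.getBucketRelativeFolder(); // expected: "centos/6/noarch"
}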

        String logPrefix = "";
        if (doNotUpload) {
            getLog().info("Per configuration, we will NOT perform any remote operations on the S3 repository.");
            logPrefix = "SKIPPING: ";
        }
        final S3RepositoryPath targetRepository = context.getS3TargetRepositoryPath();
        final String targetBucket = targetRepository.getBucketName();
        AmazonS3 s3Session = context.getS3Session();
        File directoryToUpload = uploadMetadataOnly
                ? context.getLocalYumRepo().repoDataDirectory() // only the repodata directory
                : stagingDirectory; // the entire staging directory/bucket
        if (!allowCreateRepository && !context.getLocalYumRepo().isRepoDataExists()) {
            throw new MojoExecutionException("refusing to create new repo: " + targetRepository +
                " (use s3repo.allowCreateRepository = true to force)");
        }
        for (File toUpload : ExtraIOUtils.listAllFiles(directoryToUpload)) {
            final String bucketKey = localFileToTargetS3BucketKey(toUpload, context);
            getLog().info(logPrefix + "Uploading: " + toUpload.getName() + " => s3://" + targetRepository.getBucketName() + "/" + bucketKey + "...");
            if (!doNotUpload) {
                s3Session.putObject(new PutObjectRequest(targetBucket, bucketKey, toUpload));
            }
        }
        if (uploadMetadataOnly && !context.sourceAndTargetRepositoryAreSame()) {
            // we just uploaded metadata but there are files in the source repository
            // that don't exist in the target, so we upload those here.
            for (File toUpload : ExtraIOUtils.listAllFiles(stagingDirectory)) {
                if (!context.getFilesFromTargetRepo().contains(toUpload)) {
                    // upload if it's not already in the target repo.
                    final String bucketKey = localFileToTargetS3BucketKey(toUpload, context);
                    getLog().info(logPrefix + "Uploading: " + toUpload.getName()
                        + " => s3://" + targetRepository.getBucketName() + "/" + bucketKey + "...");
                    if (!doNotUpload) {
                        s3Session.putObject(new PutObjectRequest(targetBucket, bucketKey, toUpload));
                    }
                }
            }
        }
        // delete any excluded files remotely from the TARGET only.
        for (String repoRelativePath : context.getExcludedFilesToDeleteFromTarget()) {
            final String bucketKey = toBucketKey(targetRepository, repoRelativePath);
            getLog().info(logPrefix + "Deleting: "
                + "s3://" + targetRepository.getBucketName() + "/" + bucketKey + " (excluded file)");
            if (!doNotUpload) {
                context.getS3Session().deleteObject(targetBucket, bucketKey);
            }
        }
        // and finally, delete any remote bucket keys we wish to remove (e.g., old snapshots)...from the TARGET only.
        for (SnapshotDescription toDelete : context.getSnapshotsToDeleteRemotely()) {
            getLog().info(logPrefix + "Deleting: "
                + "s3://" + targetRepository.getBucketName() + "/" + toDelete.getBucketKey() + " (excluded file)");
            getLog().info(logPrefix + "Deleting: " + toDelete + " (old snapshot)");
            if (!doNotUpload) {
                context.getS3Session().deleteObject(targetBucket, toDelete.getBucketKey());
            }
        }
        // rename any snapshots...in TARGET only.
        for (RemoteSnapshotRename toRename : context.getSnapshotsToRenameRemotely()) {
            final String sourceBucketKey = toRename.getSource().getBucketKey();
            final String targetBucketKey = toRename.getNewBucketKey();
            getLog().info(logPrefix + "Renaming: "
                + "s3://" + targetRepository.getBucketName() + "/" + sourceBucketKey
                + " => s3://" + targetRepository.getBucketName() + "/" + targetBucketKey);
            if (!doNotUpload) {
                s3Session.copyObject(targetBucket, sourceBucketKey, targetBucket, targetBucketKey);
                s3Session.deleteObject(targetBucket, sourceBucketKey);
            }
        }
View Full Code Here
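
The exclusion-delete loop above relies on toBucketKey to turn a repo-relative path into a full bucket key. A minimal sketch of what that mapping presumably does, assuming a key is just the repo-relative path prefixed with the bucket-relative folder when one is configured (the plugin's real helper is not shown here):

// Hedged sketch of a toBucketKey-style mapping.
private static String toBucketKey(S3RepositoryPath repo, String repoRelativePath) {
    return repo.hasBucketRelativeFolder()
            ? repo.getBucketRelativeFolder() + "/" + repoRelativePath
            : repoRelativePath;
}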

        String logPrefix = "";
        if (doNotUpload) {
            getLog().info("Per configuration, we will NOT perform any remote operations on the S3 repository.");
            logPrefix = "SKIPPING: ";
        }
        final S3RepositoryPath targetRepository = context.getS3TargetRepositoryPath();
        final String targetBucket = targetRepository.getBucketName();
        AmazonS3 s3Session = context.getS3Session();
        File directoryToUpload = uploadMetadataOnly
                ? context.getLocalYumRepo().repoDataDirectory() // only the repodata directory
                : stagingDirectory; // the entire staging directory/bucket
        if (!allowCreateRepository && !context.getLocalYumRepo().isRepoDataExists()) {
            throw new MojoExecutionException("refusing to create new repo: " + targetRepository +
                " (use s3repo.allowCreateRepository = true to force)");
        }
        for (File toUpload : ExtraIOUtils.listAllFiles(directoryToUpload)) {
            final String bucketKey = localFileToTargetS3BucketKey(toUpload, context);
            getLog().info(logPrefix + "Uploading: " + toUpload.getName() + " => s3://" + targetRepository.getBucketName() + "/" + bucketKey + "...");
            if (!doNotUpload) {
                s3Session.putObject(new PutObjectRequest(targetBucket, bucketKey, toUpload));
            }
        }
        if (uploadMetadataOnly && !context.sourceAndTargetRepositoryAreSame()) {
            // we just uploaded metadata but there are files in the source repository
            // that don't exist in the target, so we upload those here.
            for (File toUpload : ExtraIOUtils.listAllFiles(stagingDirectory)) {
                if (!context.getFilesFromTargetRepo().contains(toUpload)) {
                    // upload if it's not already in the target repo.
                    final String bucketKey = localFileToTargetS3BucketKey(toUpload, context);
                    getLog().info(logPrefix + "Uploading: " + toUpload.getName()
                        + " => s3://" + targetRepository.getBucketName() + "/" + bucketKey + "...");
                    if (!doNotUpload) {
                        s3Session.putObject(new PutObjectRequest(targetBucket, bucketKey, toUpload));
                    }
                }
            }
        }
        // delete any excluded files remotely from the TARGET only.
        for (String repoRelativePath : context.getExcludedFilesToDeleteFromTarget()) {
            final String bucketKey = toBucketKey(targetRepository, repoRelativePath);
            getLog().info(logPrefix + "Deleting: "
                + "s3://" + targetRepository.getBucketName() + "/" + bucketKey + " (excluded file)");
            if (!doNotUpload) {
                context.getS3Session().deleteObject(targetBucket, bucketKey);
            }
        }
        // and finally, delete any remote bucket keys we wish to remove (e.g., old snaphots)...from the TARGET only.
        for (SnapshotDescription toDelete : context.getSnapshotsToDeleteRemotely()) {
            getLog().info(logPrefix + "Deleting: "
                + "s3://" + targetRepository.getBucketName() + "/" + toDelete.getBucketKey() + " (excluded file)");
            getLog().info(logPrefix + "Deleting: " + toDelete + " (old snapshot)");
            if (!doNotUpload) {
                context.getS3Session().deleteObject(targetBucket, toDelete.getBucketKey());
            }
        }
        // rename any snapshots...in TARGET only.
        for (RemoteSnapshotRename toRename : context.getSnapshotsToRenameRemotely()) {
            final String sourceBucketKey = toRename.getSource().getBucketKey();
            final String targetBucketKey = toRename.getNewBucketKey();
            getLog().info(logPrefix + "Renaming: "
                + "s3://" + targetRepository.getBucketName() + "/" + sourceBucketKey
                + " => s3://" + targetRepository.getBucketName() + "/" + targetBucketKey);
            if (!doNotUpload) {
                s3Session.copyObject(targetBucket, sourceBucketKey, targetBucket, targetBucketKey);
                s3Session.deleteObject(targetBucket, sourceBucketKey);
            }
        }
View Full Code Here

        String logPrefix = "";
        if (doNotUpload) {
            getLog().info("Per configuration, we will NOTE perform any remote operations on the S3 repository.");
            logPrefix = "SKIPPING: ";
        }
        final S3RepositoryPath targetRepository = context.getS3RepositoryPath();
        final String targetBucket = targetRepository.getBucketName();
        AmazonS3 s3Session = context.getS3Session();
        for (File toUpload : ExtraIOUtils.listAllFiles(stagingDirectory)) {
            String bucketKey = localFileToTargetS3BucketKey(toUpload, targetRepository);
            getLog().info(logPrefix + "Uploading: " + toUpload.getName() + " => s3://" + targetRepository.getBucketName() + "/" + bucketKey + "...");
            if (!doNotUpload) {
                s3Session.putObject(new PutObjectRequest(targetBucket, bucketKey, toUpload));
            }
        }
    }
View Full Code Here
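
Both upload loops call localFileToTargetS3BucketKey to derive a key for each staged file. One plausible implementation, assuming the key is the file's path relative to stagingDirectory prefixed with the repository's bucket-relative folder (the plugin's real method may differ):

// Hedged sketch; relies on the stagingDirectory field of the surrounding mojo.
private String localFileToTargetS3BucketKey(File toUpload, S3RepositoryPath targetRepository) {
    // URI.relativize yields a forward-slash path such as "noarch/mypackage-1.0.rpm".
    String repoRelativePath = stagingDirectory.toURI().relativize(toUpload.toURI()).getPath();
    return targetRepository.hasBucketRelativeFolder()
            ? targetRepository.getBucketRelativeFolder() + "/" + repoRelativePath
            : repoRelativePath;
}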

        //      3) ensure that all files in the primary file list exist in the s3 repo
        //      4) "touch"/synthesize a zero-sized file for each file in the primary list
        if (context.getLocalYumRepo().isRepoDataExists()) { // if repo exists...
            // determine primary metadata file from metadata xml and parse it to determine repository files *declared* by the metadata
            List<String> repoRelativeFilePathList = context.getLocalYumRepo().parseFileListFromRepoMetadata();
            S3RepositoryPath s3RepositoryPath = context.getS3RepositoryPath();
            ListObjectsRequest request = new ListObjectsRequest()
                    .withBucketName(context.getS3RepositoryPath().getBucketName());
            if (s3RepositoryPath.hasBucketRelativeFolder()) {
                request.withPrefix(s3RepositoryPath.getBucketRelativeFolder() + "/");
            }
            List<S3ObjectSummary> result = S3Utils.listAllObjects(context.getS3Session(), request);
            // we will start with a set of metadata-declared files and remove any file we find that exists in the repo;
            // we expect the Set to be empty when iteration finishes. note that the s3 api returns bucket-relative
            // paths, so we prefix each of our repoRelativeFilePaths with the repository path.
            Set<String> bucketRelativePaths = new HashSet<String>();
            for (String repoRelativeFilePath : repoRelativeFilePathList) {
                if (s3RepositoryPath.hasBucketRelativeFolder()) {
                    bucketRelativePaths.add(s3RepositoryPath.getBucketRelativeFolder() + "/" + repoRelativeFilePath);
                } else {
                    bucketRelativePaths.add(repoRelativeFilePath);
                }
            }
            // for each bucket relative path in the listObjects result, remove from our set
View Full Code Here
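
The comment block above describes the verification idea: start from the set of metadata-declared files and remove everything the bucket actually contains. A hedged sketch of that iteration and one plausible handling of leftovers, consistent with step 3 (the real mojo may handle them differently):

// Hedged sketch of the verification step; variable names reuse those above.
for (S3ObjectSummary summary : result) {
    bucketRelativePaths.remove(summary.getKey());
}
if (!bucketRelativePaths.isEmpty()) {
    // Step 3: every file declared by the metadata must exist in the S3 repo.
    throw new MojoExecutionException(
            "Files declared in YUM metadata but missing from S3: " + bucketRelativePaths);
}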


            throw new MojoExecutionException("Bucket doesn't exist in S3: " + context.getS3RepositoryPath().getBucketName());
        }
    }

    private void pullExistingRepositoryMetadata(CreateOrUpdateContext context) throws MojoExecutionException {
        S3RepositoryPath s3RepositoryPath = context.getS3RepositoryPath();
        // build bucket-relative metadata folder path *with "/" suffix*
        String bucketRelativeMetadataFolderPath = WellKnowns.YUM_REPODATA_FOLDERNAME + "/";
        if (s3RepositoryPath.hasBucketRelativeFolder()) {
            // prefix repodata/ with repository folder
            bucketRelativeMetadataFolderPath = s3RepositoryPath.getBucketRelativeFolder() + "/" + bucketRelativeMetadataFolderPath;
        }
        ListObjectsRequest listObjectsRequest = new ListObjectsRequest()
                .withBucketName(s3RepositoryPath.getBucketName())
                .withPrefix(bucketRelativeMetadataFolderPath/*, which has "/" suffix*/);
        List<S3ObjectSummary> result = S3Utils.listAllObjects(context.getS3Session(), listObjectsRequest);
        getLog().debug("Found " + result.size() + " objects in bucket '" + s3RepositoryPath.getBucketName()
                + "' with prefix '" + bucketRelativeMetadataFolderPath + "'...");
        for (S3ObjectSummary summary : result) {
            final String asRepoRelativePath = S3Utils.toRepoRelativePath(summary, s3RepositoryPath);
            if (summary.getKey().endsWith("/")) {
                getLog().info("Downloading: "
                    + s3RepositoryPath + "/" + asRepoRelativePath + " => (skipping; it's a folder)");
                continue;
            }
            final S3Object object = context.getS3Session()
                    .getObject(new GetObjectRequest(s3RepositoryPath.getBucketName(), summary.getKey()));
            try {
                File targetFile = new File(stagingDirectory, asRepoRelativePath);
                getLog().info("Downloading: " + s3RepositoryPath + "/" + asRepoRelativePath + " => " + targetFile);
                Files.createParentDirs(targetFile);
                FileUtils.copyStreamToFile(new InputStreamFacade() {
View Full Code Here
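
S3Utils.toRepoRelativePath is used above to decide where each downloaded object lands under stagingDirectory. A sketch of what it presumably does, namely stripping the repository's bucket-relative folder prefix from the object key (the utility's actual source is not shown here):

// Hedged sketch; not the plugin's verbatim S3Utils implementation.
static String toRepoRelativePath(S3ObjectSummary summary, S3RepositoryPath repo) {
    String key = summary.getKey();
    if (repo.hasBucketRelativeFolder()) {
        String prefix = repo.getBucketRelativeFolder() + "/";
        if (key.startsWith(prefix)) {
            return key.substring(prefix.length());
        }
    }
    return key;
}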

