Package org.apache.airavata.gfac.provider

Examples of org.apache.airavata.gfac.provider.GFacProviderException
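GFacProviderException is the exception type that GFac providers use to signal failures during job setup, submission, and result handling; the snippets collected below show it being thrown either with a plain message or with a message plus the underlying cause. As a minimal sketch of that pattern (the ExampleProvider class and its submitJob helper are hypothetical names used only for illustration; only GFacProviderException itself comes from this package):

import org.apache.airavata.gfac.provider.GFacProviderException;

public class ExampleProvider {

    public void execute(String hostAddress) throws GFacProviderException {
        // Fail fast on missing configuration with a descriptive message.
        if (hostAddress == null || hostAddress.isEmpty()) {
            throw new GFacProviderException("Host address is not set");
        }
        try {
            submitJob(hostAddress);
        } catch (Exception e) {
            // Wrap the underlying cause so callers still see the full stack trace.
            throw new GFacProviderException("Error submitting job to " + hostAddress, e);
        }
    }

    // Hypothetical stand-in for provider-specific submission logic.
    private void submitJob(String hostAddress) throws Exception {
    }
}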


            if (jobExecutionContext.getSecurityContext(AmazonSecurityContext.AMAZON_SECURITY_CONTEXT)
                    instanceof AmazonSecurityContext) {
                this.amazonSecurityContext = (AmazonSecurityContext) jobExecutionContext.
                        getSecurityContext(AmazonSecurityContext.AMAZON_SECURITY_CONTEXT);
            } else {
                throw new GFacProviderException("Amazon Security Context is not set: " + jobExecutionContext);
            }
        } else {
            throw new GFacProviderException("Job Execution Context is null");
        }

        if (log.isDebugEnabled()) {
            log.debug("ACCESS_KEY:" + amazonSecurityContext.getAccessKey());
            log.debug("SECRET_KEY:" + amazonSecurityContext.getSecretKey());
            log.debug("AMI_ID:" + amazonSecurityContext.getAmiId());
            log.debug("INS_ID:" + amazonSecurityContext.getInstanceId());
            log.debug("INS_TYPE:" + amazonSecurityContext.getInstanceType());
            log.debug("USERNAME:" + amazonSecurityContext.getUserName());
        }
        saveApplicationJob(jobExecutionContext);
        /* Validation */
        if (amazonSecurityContext.getAccessKey() == null || amazonSecurityContext.getAccessKey().isEmpty())
            throw new GFacProviderException("EC2 Access Key is empty");
        if (amazonSecurityContext.getSecretKey() == null || amazonSecurityContext.getSecretKey().isEmpty())
            throw new GFacProviderException("EC2 Secret Key is empty");
        if ((amazonSecurityContext.getAmiId() == null && amazonSecurityContext.getInstanceId() == null) ||
                (amazonSecurityContext.getAmiId() != null && amazonSecurityContext.getAmiId().isEmpty()) ||
                (amazonSecurityContext.getInstanceId() != null && amazonSecurityContext.getInstanceId().isEmpty()))
            throw new GFacProviderException("EC2 AMI or Instance ID is empty");
        if (amazonSecurityContext.getUserName() == null || amazonSecurityContext.getUserName().isEmpty())
            throw new GFacProviderException("EC2 Username is empty");

        /* The EC2 instance needs to be started before the job can be run on it */
        AWSCredentials credential =
                new BasicAWSCredentials(amazonSecurityContext.getAccessKey(), amazonSecurityContext.getSecretKey());
        AmazonEC2Client ec2client = new AmazonEC2Client(credential);
View Full Code Here


            OutputParameterType[] outputParametersArray = jobExecutionContext.getApplicationContext().
                    getServiceDescription().getType().getOutputParametersArray();
            if (outputParametersArray != null && outputParametersArray.length > 0) {
                outParamName = outputParametersArray[0].getParameterName();
            } else {
                throw new GFacProviderException("Output parameter name is not set; unable to filter " +
                        "the job result from standard out");
            }

            // Accept every host key without verification; host authentication is effectively skipped here.
            sshClient.connect(properties, new HostKeyVerification() {
                public boolean verifyHost(String s, SshPublicKey sshPublicKey) throws TransportProtocolException {
                    log.debug("Verifying Host: " + s);
                    return true;
                }
            });
            GFacUtils.updateApplicationJobStatus(jobExecutionContext, jobId, ApplicationJobStatus.AUTHENTICATE);
            // Initialize the authentication data.
            PublicKeyAuthenticationClient publicKeyAuth = new PublicKeyAuthenticationClient();
            publicKeyAuth.setUsername(amazonSecurityContext.getUserName());
            SshPrivateKeyFile file = SshPrivateKeyFile.
                    parse(new File(System.getProperty("user.home") + "/.ssh/" + KEY_PAIR_NAME));
            SshPrivateKey privateKey = file.toPrivateKey("");
            publicKeyAuth.setKey(privateKey);

            // Authenticate
            int result = sshClient.authenticate(publicKeyAuth);
            if (result == AuthenticationProtocolState.FAILED) {
                throw new GFacProviderException("The authentication failed");
            } else if (result == AuthenticationProtocolState.PARTIAL) {
                throw new GFacProviderException("The authentication succeeded but another "
                        + "authentication is required");
            } else if (result == AuthenticationProtocolState.COMPLETE) {
                log.info("ssh client authentication is complete...");
            }
            GFacUtils.updateApplicationJobStatus(jobExecutionContext, jobId, ApplicationJobStatus.SUBMITTED);
            SessionChannelClient session = sshClient.openSessionChannel();
            log.info("ssh session successfully opened...");
            session.requestPseudoTerminal("vt100", 80, 25, 0, 0, "");
            session.startShell();
           
            GFacUtils.updateApplicationJobStatus(jobExecutionContext, jobId, ApplicationJobStatus.EXECUTING);
            session.getOutputStream().write(shellCmd.getBytes());

            InputStream in = session.getInputStream();
            byte[] buffer = new byte[255];
            int read;
            String executionResult = "";
            while ((read = in.read(buffer)) > 0) {
                String out = new String(buffer, 0, read);
                if (out.startsWith(outParamName)) {
                    executionResult = out.split("=")[1];
                    log.debug("Result found in the standard out");
                    break;
                }
            }
            GFacUtils.updateApplicationJobStatus(jobExecutionContext, jobId, ApplicationJobStatus.RESULTS_RETRIEVE);

            executionResult = executionResult.replace("\r","").replace("\n","");
            log.info("Result of the job : " + executionResult);

            for(OutputParameterType outparamType : outputParametersArray){
                /* Assuming that there is just a single result. If you want to add more results, update the necessary
                   logic below */
                String paramName = outparamType.getParameterName();
                ActualParameter outParam = new ActualParameter();
                outParam.getType().changeType(StringParameterType.type);
                ((StringParameterType) outParam.getType()).setValue(executionResult);
                jobExecutionContext.getOutMessageContext().addParameter(paramName, outParam);
            }
            GFacUtils.updateApplicationJobStatus(jobExecutionContext, jobId, ApplicationJobStatus.FINISHED);
        } catch (InvalidSshKeyException e) {
            throw new GFacProviderException("Invalid SSH key", e);
        } catch (IOException e) {
            throw new GFacProviderException("I/O error occurred during job execution", e);
        } catch (Exception e) {
            throw new GFacProviderException("Error parsing standard out for job execution result", e);
        }

    }
View Full Code Here

    private String setCmdParams(JobExecutionContext jobExecutionContext, String command) throws GFacProviderException {
        List<String> inputParams = null;
        try {
            inputParams = ProviderUtils.getInputParameters(jobExecutionContext);
        } catch (GFacProviderException e) {
            throw new GFacProviderException("Error extracting input values from JobExecutionContext", e);
        }

        for (String param : inputParams) {
            command = command + " " + param;
        }
        return command;
    }
View Full Code Here

                        ec2client.describeInstances(describeInstancesRequest.
                                withInstanceIds(amazonSecurityContext.getInstanceId()));

                if (describeInstancesResult.getReservations().isEmpty() ||
                        describeInstancesResult.getReservations().get(0).getInstances().isEmpty()) {
                    throw new GFacProviderException("Instance not found: " + amazonSecurityContext.getInstanceId());
                }

                instance = describeInstancesResult.getReservations().get(0).getInstances().get(0);

                // check instance keypair
                if (instance.getKeyName() == null || !instance.getKeyName().equals(KEY_PAIR_NAME)) {
                    throw new GFacProviderException("Key pair for instance " + amazonSecurityContext.getInstanceId() +
                            " is not valid");
                }
            }

            jobExecutionContext.getNotificationService().publish(new EC2ProviderEvent("EC2 Instance " +
                    this.instance.getInstanceId() + " is running with public name " + this.instance.getPublicDnsName()));

        } catch (Exception e) {
            throw new GFacProviderException("Error while validating the EC2 instance", e);
        }

    }
View Full Code Here

                cluster = ((GSISecurityContext) jobExecutionContext.getSecurityContext(GSISecurityContext.GSI_SECURITY_CONTEXT)).getPbsCluster();
            } else {
                cluster = ((SSHSecurityContext) jobExecutionContext.getSecurityContext(SSHSecurityContext.SSH_SECURITY_CONTEXT)).getPbsCluster();
            }
            if (cluster == null) {
                throw new GFacProviderException("Security context is not set properly");
            } else {
                log.info("Successfully retrieved the Security Context");
            }

            // Get the Stdouts and StdErrs
View Full Code Here

    private void makeFileSystemDir(String dir) throws GFacProviderException {
        File f = new File(dir);
        if (f.isDirectory()) {
            return;
        } else if (!f.mkdir()) {
            throw new GFacProviderException("Cannot make directory " + dir);
        }
    }
View Full Code Here

            log.debug("RSL = " + rsl);
            GramJob job = new GramJob(rsl);
            return job;
        } catch (ToolsException te) {
            throw new GFacProviderException(te.getMessage(), te);
        }
    }
View Full Code Here

                    if(cpuCount>0){
                        app.setCpuCount(cpuCount);
                    }
                } catch (NullPointerException e) {
                    log.debug("No value sent in WorkflowContextHeader for CPU Count; value in the Deployment Descriptor will be used");
                }
                try {
                    int nodeCount = currentContextHeader.getWorkflowSchedulingContext().getApplicationSchedulingContextArray()[0].getNodeCount();
                    if(nodeCount>0){
                        app.setNodeCount(nodeCount);
                    }
                } catch (NullPointerException e) {
                    log.debug("No value sent in WorkflowContextHeader for Node Count; value in the Deployment Descriptor will be used");
                }
                try {
                    String queueName = currentContextHeader.getWorkflowSchedulingContext().getApplicationSchedulingContextArray()[0].getQueueName();
                    if (queueName != null) {
                        if(app.getQueue() == null){
                            QueueType queueType = app.addNewQueue();
                            queueType.setQueueName(queueName);
                        }else{
                            app.getQueue().setQueueName(queueName);
                        }
                    }
                } catch (NullPointerException e) {
                    log.debug("No value sent in WorkflowContextHeader for Queue Name; value in the Deployment Descriptor will be used");
                }
                try {
                    int maxwallTime = currentContextHeader.getWorkflowSchedulingContext().getApplicationSchedulingContextArray()[0].getMaxWallTime();
                    if(maxwallTime>0){
                        app.setMaxWallTime(maxwallTime);
                    }
                } catch (NullPointerException e) {
                    log.debug("No value sent in WorkflowContextHeader for Max Wall Time; value in the Deployment Descriptor will be used");
                }
            }
        }
//        if(currentContextHeader.getWorkflowOutputDataHandling() != null){
//            if(currentContextHeader.getWorkflowOutputDataHandling().getApplicationOutputDataHandlingArray().length != 0)
View Full Code Here

            isWhirrBasedDeployment = true;
        } else {
            String hadoopConfigDirPath = (String) inMessageContext.getParameter("HADOOP_CONFIG_DIR");
            File hadoopConfigDir = new File(hadoopConfigDirPath);
            if (!hadoopConfigDir.exists()) {
                throw new GFacProviderException("Specified Hadoop configuration directory doesn't exist: " + hadoopConfigDirPath);
            } else if (FileUtils.listFiles(hadoopConfigDir, null, null).isEmpty()) {
                throw new GFacProviderException("Cannot find any Hadoop configuration files inside " + hadoopConfigDirPath);
            }

            this.hadoopConfigDir = hadoopConfigDir;
        }
    }
View Full Code Here

                }
            }
        } catch (Exception e) {
            String errMessage = "Error occurred during Map-Reduce job execution.";
            logger.error(errMessage, e);
            throw new GFacProviderException(errMessage, e);
        }
    }
View Full Code Here
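
All of the snippets above sit on the throwing side. On the calling side, code that drives a provider typically catches GFacProviderException, logs it, and records the job as failed. A minimal sketch of that handling (JobRunner, runProvider, and markFailed are hypothetical names, and the SLF4J logger is an assumption; only GFacProviderException itself comes from this package):

import org.apache.airavata.gfac.provider.GFacProviderException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class JobRunner {

    private static final Logger log = LoggerFactory.getLogger(JobRunner.class);

    public boolean runJob(String jobId) {
        try {
            runProvider(jobId);
            return true;
        } catch (GFacProviderException e) {
            // The wrapped cause (if any) travels with the exception, so log the full chain.
            log.error("Provider execution failed for job " + jobId, e);
            markFailed(jobId);
            return false;
        }
    }

    // Hypothetical stand-in for the provider-specific execution shown in the snippets above.
    private void runProvider(String jobId) throws GFacProviderException {
    }

    // Hypothetical status bookkeeping.
    private void markFailed(String jobId) {
    }
}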

