Examples of EStreamTokenizer


Examples of edu.cmu.cs.stage3.io.EStreamTokenizer
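The first excerpt is from the Alice authoring tool's ASF skeleton importer: it wraps the incoming stream in an EStreamTokenizer, configures comment, end-of-line, and word-character handling, and then walks the :units, :root, :bonedata, and :hierarchy sections of an ASF motion-capture skeleton file to build a skeleton of ASFBone objects.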

    return map;
    }

    private Model parseASF(InputStream is) throws java.io.IOException {

      EStreamTokenizer tokenizer;

        // scale factors for converting the file's angles to radians and lengths to meters
        // (0.0254 is the meters-per-inch default; the :units section below may adjust both)
        skel.anglescale=1.0;
        skel.lengthscale=.0254;

        ASFBone bone;

        // load a skeleton
        edu.cmu.cs.stage3.alice.authoringtool.util.BackslashConverterFilterInputStream bcfis = new edu.cmu.cs.stage3.alice.authoringtool.util.BackslashConverterFilterInputStream( is );
        java.io.BufferedReader br = new java.io.BufferedReader( new java.io.InputStreamReader( bcfis ) );
        tokenizer = new EStreamTokenizer( br );

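        // Tokenizer setup shared by the Alice mocap importers:
        //   '#' starts a comment that runs to end of line,
        //   end-of-line is reported as a token (needed for the per-line "order"/"dof" lists),
        //   keywords are folded to lower case, numeric fields are parsed into nval,
        //   and '_' and ':' count as word characters, so section names like ":root" are single tokens.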
        tokenizer.commentChar('#');
        tokenizer.eolIsSignificant( true );
        tokenizer.lowerCaseMode( true );
        tokenizer.parseNumbers();
        tokenizer.wordChars( '_', '_' );
        tokenizer.wordChars( ':', ':' );

        // find the :units section, or the :root section if there is no :units section (assume default units)
        while (tokenizer.ttype!=StreamTokenizer.TT_WORD || !(tokenizer.sval.equals(":units")||tokenizer.sval.equals(":root"))) {
            tokenizer.nextToken();
        }

        // if there is a units section...
        if (tokenizer.sval.equals(":units")) {
            tokenizer.nextToken();
            // **************************************
            // **    Parse the :units section      **
            // **************************************
            //System.out.println("Parsing :units ...");
            while (tokenizer.ttype!=StreamTokenizer.TT_WORD || tokenizer.sval.charAt(0)!=':') {
                if (tokenizer.ttype!=StreamTokenizer.TT_WORD) {
                    tokenizer.nextToken();
                    continue;
                }
                // adjust the length conversion
                if (tokenizer.sval.equals("length")) {
                    tokenizer.nextToken();
                    skel.lengthscale/=tokenizer.nval;
                // adjust the angle conversion
                } else if (tokenizer.sval.equals("angle")) {
                    tokenizer.nextToken();
                    if (tokenizer.sval.equals("deg")) {
                        skel.anglescale = Math.PI/180; // degrees to radians
                    }
                }
                tokenizer.nextToken();
            }

            // now find the root section
            while (tokenizer.ttype!=StreamTokenizer.TT_WORD || !tokenizer.sval.equals(":root")) {
                tokenizer.nextToken();
            }
            tokenizer.nextToken();
        }
        // **************************************
        // **     Parse the :root section      **
        // **************************************
        //System.out.println("Parsing :root ...");

        skel.bones.addElement(new ASFBone("root"));
        skel.bones_dict.put("root",skel.bones.lastElement());
        bone = skel.getRoot();

        while (tokenizer.ttype!=StreamTokenizer.TT_WORD || tokenizer.sval.charAt(0)!=':') {
            if (tokenizer.ttype!=StreamTokenizer.TT_WORD) {
                tokenizer.nextToken();
                continue;
            }

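            // the z translation and the x/y rotations are negated below, presumably
            // a conversion between coordinate-system handedness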
            if (tokenizer.sval.equals("position")) {
                tokenizer.nextToken();
                bone.base_position.x = tokenizer.nval*skel.lengthscale;
                tokenizer.nextToken();
                bone.base_position.y = tokenizer.nval*skel.lengthscale;
                tokenizer.nextToken();
                bone.base_position.z = -tokenizer.nval*skel.lengthscale;
            } else if (tokenizer.sval.equals("orientation")) {
                tokenizer.nextToken();
                bone.base_axis.rotateX(-tokenizer.nval*skel.anglescale);
                tokenizer.nextToken();
                bone.base_axis.rotateY(-tokenizer.nval*skel.anglescale);
                tokenizer.nextToken();
                bone.base_axis.rotateZ(tokenizer.nval*skel.anglescale);
            } else if (tokenizer.sval.equals("order")) {
                while (tokenizer.nextToken()!=StreamTokenizer.TT_EOL) {
                    if(tokenizer.sval.equals("tx")) {
                        bone.dof.addElement(ASFBone.DOF_TX);
                    } else if (tokenizer.sval.equals("ty")) {
                        bone.dof.addElement(ASFBone.DOF_TY);
                    } else if (tokenizer.sval.equals("tz")) {
                        bone.dof.addElement(ASFBone.DOF_TZ);
                    } else if (tokenizer.sval.equals("rx")) {
                        bone.dof.addElement(ASFBone.DOF_RX);
                    } else if (tokenizer.sval.equals("ry")) {
                        bone.dof.addElement(ASFBone.DOF_RY);
                    } else if (tokenizer.sval.equals("rz")) {
                        bone.dof.addElement(ASFBone.DOF_RZ);
                    }
                }
            }
            tokenizer.nextToken();
        }

        // find the :bonedata section
        while (tokenizer.ttype!=StreamTokenizer.TT_WORD || !tokenizer.sval.equals(":bonedata")) {
            tokenizer.nextToken();
        }
        tokenizer.nextToken();

        // **************************************
        // **  Parse the :bonedata section     **
        // **************************************
        //System.out.println("Parsing :bonedata ...");

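        // Each bone record is bracketed by "begin" ... "end".  The loop scans for "begin",
        // fills in a new ASFBone (axis, direction, length, name, dof), and registers it
        // in both the bones list and the bones_dict lookup table.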
        while (tokenizer.ttype!=StreamTokenizer.TT_WORD || tokenizer.sval.charAt(0)!=':') {
            // find a bone
            while (tokenizer.ttype!=StreamTokenizer.TT_WORD || (tokenizer.sval.charAt(0)!=':' && !tokenizer.sval.equals("begin")))
                tokenizer.nextToken();

            if (tokenizer.sval.charAt(0)==':')
                break;
            tokenizer.nextToken();

            //System.out.print("Parsing Bone ");
            // parse the bone
            bone = new ASFBone();

            while (tokenizer.ttype!=StreamTokenizer.TT_WORD || !tokenizer.sval.equals("end")) {
                if (tokenizer.ttype!=StreamTokenizer.TT_WORD) {
                    tokenizer.nextToken();
                    continue;
                }

                if (tokenizer.sval.equals("axis")) {
                    tokenizer.nextToken();
                    bone.base_axis.rotateX(-tokenizer.nval*skel.anglescale);
                    tokenizer.nextToken();
                    bone.base_axis.rotateY(-tokenizer.nval*skel.anglescale);
                    tokenizer.nextToken();
                    bone.base_axis.rotateZ(tokenizer.nval*skel.anglescale);
                } else if (tokenizer.sval.equals("direction")) {
                    tokenizer.nextToken();
                    bone.direction.x = tokenizer.nval;
                    tokenizer.nextToken();
                    bone.direction.y = tokenizer.nval;
                    tokenizer.nextToken();
                    bone.direction.z = -tokenizer.nval;
                } else if (tokenizer.sval.equals("length")) {
                    tokenizer.nextToken();
                    bone.length = tokenizer.nval*skel.lengthscale;
                } else if (tokenizer.sval.equals("name")) {
                    tokenizer.nextToken();
                    bone.name = tokenizer.sval;
                    //System.out.println(bone.name);
                } else if (tokenizer.sval.equals("dof")) {
                    while (tokenizer.nextToken()!=StreamTokenizer.TT_EOL) {
                        if(tokenizer.sval.equals("tx")) {
                            bone.dof.addElement(ASFBone.DOF_TX);
                        } else if (tokenizer.sval.equals("ty")) {
                            bone.dof.addElement(ASFBone.DOF_TY);
                        } else if (tokenizer.sval.equals("tz")) {
                            bone.dof.addElement(ASFBone.DOF_TZ);
                        } else if (tokenizer.sval.equals("rx")) {
                            bone.dof.addElement(ASFBone.DOF_RX);
                        } else if (tokenizer.sval.equals("ry")) {
                            bone.dof.addElement(ASFBone.DOF_RY);
                        } else if (tokenizer.sval.equals("rz")) {
                            bone.dof.addElement(ASFBone.DOF_RZ);
                        } else if (tokenizer.sval.equals("l")) {
                            bone.dof.addElement(ASFBone.DOF_L);
                        }
                    }
                }
                tokenizer.nextToken();

            }
            tokenizer.nextToken();

            skel.bones.addElement(bone);
            skel.bones_dict.put(bone.name,bone);
        }

        // find the :hierarchy section
        while (tokenizer.ttype!=StreamTokenizer.TT_WORD || !tokenizer.sval.equals(":hierarchy")) {
            tokenizer.nextToken();
        }
        tokenizer.nextToken();

        while (tokenizer.ttype!=StreamTokenizer.TT_WORD || !tokenizer.sval.equals("begin")) {
            tokenizer.nextToken();
        }
        tokenizer.nextToken();

        // **************************************
        // **  Parse the :hierarchy section    **
        // **************************************

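        // Each :hierarchy line names a parent bone followed by its children;
        // the children are attached until the end-of-line token is reached.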
        while (tokenizer.ttype!=StreamTokenizer.TT_WORD || !tokenizer.sval.equals("end")) {
            if (tokenizer.ttype!=StreamTokenizer.TT_WORD) {
                tokenizer.nextToken();
                continue;
            }
            bone = (ASFBone)skel.bones_dict.get(tokenizer.sval);

            while (tokenizer.nextToken()!=StreamTokenizer.TT_EOL) {
                bone.children.addElement(skel.bones_dict.get(tokenizer.sval));
            }
        }

        return skel.buildBones();
View Full Code Here

Examples of edu.cmu.cs.stage3.io.EStreamTokenizer
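The second excerpt is the companion motion-data importer: it reads the file header with a similarly configured EStreamTokenizer to find the matching ASF file and the native sample rate, lets the user confirm the choices in a MocapImporterOptionsDialog, loads the skeleton through ASFImporter, and then reads a frame number plus per-bone degree-of-freedom values for each sample.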

        return super.load(url);
  }

    protected Element load(InputStream is, String ext) throws java.io.IOException {
        EStreamTokenizer tokenizer;
        edu.cmu.cs.stage3.alice.authoringtool.util.BackslashConverterFilterInputStream bcfis = new edu.cmu.cs.stage3.alice.authoringtool.util.BackslashConverterFilterInputStream( is );
        java.io.BufferedReader br = new java.io.BufferedReader( new java.io.InputStreamReader( bcfis ) );
        tokenizer = new EStreamTokenizer( br );

        tokenizer.commentChar('#');
        tokenizer.eolIsSignificant( false );
        tokenizer.lowerCaseMode( true );
        tokenizer.parseNumbers();
        tokenizer.wordChars( '_', '_' );
        tokenizer.wordChars( ':', ':' );

        // read the headers and stop at the :degrees section
        String ASFfilename = "";
        String ASFpath = "";
        double dt = 1.0/nativeFPS;

        while (tokenizer.ttype!=StreamTokenizer.TT_WORD || !tokenizer.sval.equals(":degrees")) {
            if (tokenizer.ttype!=StreamTokenizer.TT_WORD) {
                tokenizer.nextToken();
                continue;
            }
            if (tokenizer.sval.equals(":asf-file")) {
                tokenizer.nextToken();
                ASFfilename = tokenizer.sval;
            } else if (tokenizer.sval.equals(":asf-path")) {
                tokenizer.nextToken();
                ASFpath = tokenizer.sval;
            } else if (tokenizer.sval.equals(":samples-per-second")) {
                tokenizer.nextToken();
                nativeFPS = (int)tokenizer.nval;
                dt = 1.0/nativeFPS;
            }
            tokenizer.nextToken();
        }

        File ASFfile = new File("");

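        // Resolve the skeleton file: try :asf-file as given, then each ';'-separated
        // directory listed in :asf-path, and finally the AMCPath directory.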
        if (!ASFfilename.equals("")) {
            ASFfile = new File(ASFfilename);
            if (!ASFfile.isFile()) {
                int i;
                int previ=0;
                for (i=ASFpath.indexOf(";"); i!=-1; i=ASFpath.indexOf(";",i+1)) {
                    String temp = ASFpath.substring(previ,i); // directory entry up to (not including) the ';'
                    if (!temp.endsWith(File.separator) && !temp.equals("")) temp = temp.concat(File.separator);
                    ASFfile = new File(temp.concat(ASFfilename));
                    if (ASFfile.isFile()) break;
                    previ=i+1;
                }
                if (!ASFfile.isFile()) {
                    String temp = ASFpath.substring(previ);
                    if (!temp.endsWith(File.separator) && !temp.equals("")) temp = temp.concat(File.separator);
                    ASFfile = new File(temp.concat(ASFfilename));
                }
                if (!ASFfile.isFile()) {
                    String temp = AMCPath;
                    if (!temp.endsWith(File.separator) && !temp.equals("")) temp = temp.concat(File.separator);
                    ASFfile = new File(temp.concat(ASFfilename));
                }
            }

        }


        MocapImporterOptionsDialog optionsDialog = new MocapImporterOptionsDialog();

        if (ASFfile.isFile()) {
            optionsDialog.setASFFile(ASFfile.getPath());
            optionsDialog.setASFPath(ASFfile.getParentFile());
        } else
            optionsDialog.setASFPath(new File(AMCPath));
        optionsDialog.setNativeFPS(nativeFPS);
        optionsDialog.pack();
        optionsDialog.show();

        if (optionsDialog.ok==false)
            return null;

        applyTo = (Model)optionsDialog.getSelectedPart();
        ASFfile = new File(optionsDialog.getASFFile());
        fps = optionsDialog.getFPS();
        nativeFPS = optionsDialog.getNativeFPS();
        dt = 1.0/nativeFPS;

        if (!ASFfile.isFile()) return null;

        InputStream ASFis = new FileInputStream(ASFfile);
        skel = (new ASFImporter()).loadSkeleton(ASFis);
        ASFis.close();

        int samplenumber = 0;

        if (applyTo==null) {
            applyTo = skel.getRoot().model;
        }
        skel.setBasePose(applyTo);

        //System.out.println("Parsing Motion...");
        tokenizer.nextToken();
        while (tokenizer.ttype!=StreamTokenizer.TT_EOF) {
            samplenumber = (int)tokenizer.nval;
            tokenizer.nextToken();

            ASFBone bone;

            while (tokenizer.ttype==StreamTokenizer.TT_WORD) {
                bone = (ASFBone)skel.bones_dict.get(tokenizer.sval);
                tokenizer.nextToken();

                bone.position = (Vector3)bone.base_position.clone();
                bone.axis = new Matrix33();

                ListIterator li2;
                li2 = bone.dof.listIterator();
                while (li2.hasNext()) {
                    Integer d = (Integer)li2.next();
                    if (d.equals(ASFBone.DOF_L)) {
                        //bone.lengthSpline.addKey(new DoubleSimpleKey((samplenumber-1)*dt,tokenizer.nval*skel.lengthscale));
                    } else if (d.equals(ASFBone.DOF_TX)) {
                        bone.position.x = tokenizer.nval*skel.lengthscale;
                    } else if (d.equals(ASFBone.DOF_TY)) {
                        bone.position.y = tokenizer.nval*skel.lengthscale;
                    } else if (d.equals(ASFBone.DOF_TZ)) {
                        bone.position.z = -tokenizer.nval*skel.lengthscale;
                    } else if (d.equals(ASFBone.DOF_RX)) {
                        bone.axis.rotateX(-tokenizer.nval*skel.anglescale);
                    } else if (d.equals(ASFBone.DOF_RY)) {
                        bone.axis.rotateY(-tokenizer.nval*skel.anglescale);
                    } else if (d.equals(ASFBone.DOF_RZ)) {
                        bone.axis.rotateZ(tokenizer.nval*skel.anglescale);
                    }
                    tokenizer.nextToken();
                }

                if ((bone.lastTime+(1.0/fps)) <= ((samplenumber-1)*dt) ) {
                    bone.lastTime = ((samplenumber-1)*dt);
                    bone.hasFrame = true;
View Full Code Here
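Both excerpts drive EStreamTokenizer through the same fields and constants as java.io.StreamTokenizer (ttype, sval, nval, TT_WORD, TT_EOL, TT_EOF), so the configuration pattern can be tried in isolation with the standard class. Below is a minimal, self-contained sketch, assuming EStreamTokenizer accepts the same calls as java.io.StreamTokenizer; the class name AsfTokenizerSketch and the inline ASF fragment are illustrative only.

import java.io.BufferedReader;
import java.io.IOException;
import java.io.StreamTokenizer;
import java.io.StringReader;

public class AsfTokenizerSketch {
    public static void main(String[] args) throws IOException {
        // Hypothetical ASF fragment; the importers above read from a BufferedReader
        // wrapped around a BackslashConverterFilterInputStream instead.
        String asf = ":units\n  angle deg\n  length 0.45\n:root\n  position 0 0 0\n";
        StreamTokenizer tok = new StreamTokenizer(new BufferedReader(new StringReader(asf)));

        // Same configuration as the ASF excerpt above.
        tok.commentChar('#');        // '#' begins a comment that runs to end of line
        tok.eolIsSignificant(true);  // report TT_EOL so per-line records can be detected
        tok.lowerCaseMode(true);     // fold keywords to lower case
        tok.parseNumbers();          // numeric fields arrive as TT_NUMBER in nval
        tok.wordChars('_', '_');     // '_' stays inside words
        tok.wordChars(':', ':');     // ':' stays inside words, so ":root" is one token

        while (tok.nextToken() != StreamTokenizer.TT_EOF) {
            switch (tok.ttype) {
                case StreamTokenizer.TT_WORD:   System.out.println("word:   " + tok.sval); break;
                case StreamTokenizer.TT_NUMBER: System.out.println("number: " + tok.nval); break;
                case StreamTokenizer.TT_EOL:    System.out.println("<eol>");               break;
                default:                        System.out.println("char:   " + (char) tok.ttype);
            }
        }
    }
}

Run against that fragment, the loop prints the section keywords as lower-cased words, the numeric fields as doubles, and an <eol> marker at each line break, which is the token stream the importers above branch on.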