
What this is

This file is included in the DevDaily.com "Java Source Code Warehouse" project. The intent of this project is to help you "Learn Java by Example"™.

The source code

/*
 *                 Sun Public License Notice
 * 
 * The contents of this file are subject to the Sun Public License
 * Version 1.0 (the "License"). You may not use this file except in
 * compliance with the License. A copy of the License is available at
 * http://www.sun.com/
 * 
 * The Original Code is NetBeans. The Initial Developer of the Original
 * Code is Sun Microsystems, Inc. Portions Copyright 1997-2003 Sun
 * Microsystems, Inc. All Rights Reserved.
 */

package org.openide.actions;

import java.util.Collections;
import java.util.Enumeration;
import java.util.HashSet;

import org.openide.loaders.DataObject;
import org.openide.awt.StatusDisplayer;
import org.openide.compiler.CompilerJob;
import org.openide.compiler.Compiler;
import org.openide.compiler.CompilerTask;
import org.openide.cookies.CompilerCookie;
import org.openide.util.enum.*;
import org.openide.util.actions.NodeAction;
import org.openide.nodes.Node;
import org.openide.util.NbBundle;
import org.openide.util.RequestProcessor;

/** Compilation action--compiles all selected nodes.
* Concrete subclasses must specify what type of
* compilation is needed (e.g. compile vs. build) and whether to operate
* recursively.
* @see org.openide.compiler
*
* @author   Jaroslav Tulach, Petr Hamernik
*/
public abstract class AbstractCompileAction extends NodeAction {

    /* Compiles the nodes */
    protected void performAction(final Node[] activatedNodes) {
        // Should not block the event queue for this: compilation itself is
        // done in the compilation engine, but searching for objects to
        // compile is done synchronously.
        RequestProcessor.getDefault().post(new Runnable() {
            public void run() {
                compileNodes2(activatedNodes);
            }
        });
    }
    
    /** performAction only posts a task to the request processor, so the
    * action itself is cheap enough to run synchronously.
    */
    protected boolean asynchronous() {
        return false;
    }

    /** Checks whether every selected node either supports the requested
    * compilation depth or is a container that can still be descended into.
    */
    protected boolean enable (Node[] arr) {
        if (arr.length == 0) {
            return false;
        }

        Class cookie = cookie ();
        Compiler.Depth depth = depth ();
        for (int i = 0; i < arr.length; i++) {
            CompilerCookie cc = (CompilerCookie)arr[i].getCookie (cookie);
            if (cc == null) {
                if (depth.isLastDepth () || arr[i].getCookie (DataObject.Container.class) == null) {
                    return false;
                }
            } else {
                if (!cc.isDepthSupported (depth)) {
                    return  false;
                }
            }
        }
        return true;
    }

    /** Get the depth the compiler compiles on.
    * @return depth for the job that this compiler works on
    */
    protected abstract Compiler.Depth depth ();


    /** Get the requested cookie class.
    * @return the class, e.g. {@link org.openide.cookies.CompilerCookie.Compile}
    */
    protected abstract Class cookie ();

    /** Message to display when the action is looking for
    * objects that should be processed.
    *
    * @return text to display at status line
    */
    protected String message () {
        return NbBundle.getMessage(Compiler.class, "CTL_CompilationStarted");
    }

    /** Compile some data objects.
     * @since org.openide.compiler 1.0
     */
    public static boolean compileDataObjects(DataObject[] objects) {
        HashSet compile = new HashSet ();
        for (int i = 0; i < objects.length; i++) {
            CompilerCookie comp = (CompilerCookie) objects[i].getCookie(CompilerCookie.Compile.class);
            if (comp != null) {
                compile.add(comp);
            }
        }
        return compile(Collections.enumeration(compile), findName(objects));
    }
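
    // A hypothetical caller, for illustration; the "objects" array is assumed
    // to come from elsewhere (e.g. the contents of a DataFolder):
    //
    //   DataObject[] objects = ...;
    //   boolean ok = AbstractCompileAction.compileDataObjects(objects);
    //   // ok is true if everything compiled or was already up to date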
    
    /** Compile some nodes.
     * @since org.openide.compiler 1.0
     */
    public static boolean compileNodes(Node[] nodes) {
        HashSet compile = new HashSet ();
        for (int i = 0; i < nodes.length; i++) {
            CompilerCookie comp = (CompilerCookie) nodes[i].getCookie(CompilerCookie.Compile.class);
            if (comp != null) {
                compile.add(comp);
            }
        }
        return compile(Collections.enumeration(compile), findName(nodes));
    }

    /** Compiles a set of nodes.
    * @param nodes the nodes
    */
    void compileNodes2 (final Node[] nodes) {
        StatusDisplayer.getDefault().setStatusText(message());
        CompilerJob job;
        DataObject[] objects;
        boolean useNodes;
        try {

            job = new CompilerJob (depth ());
            // enumeration of CompilerCookies and nulls
            prepareJobFor(job, new ArrayEnumeration (nodes), cookie (), depth ());

            objects = new DataObject[nodes.length];
            useNodes = false;
            for (int i = 0; i < nodes.length; i++) {
                objects[i] = (DataObject)nodes[i].getCookie (DataObject.class);
                if (objects[i] == null) {
                    useNodes = true;
                    break;
                }
            }

        } finally { // #10889
            // If a RuntimeException is thrown while creating the job, it is
            // best to clear the status line here, since no one else will.
            StatusDisplayer.getDefault().setStatusText(""); // NOI18N
        }

        job.setDisplayName (useNodes ? findName (nodes) : findName(objects));
        job.start ();
    }


    /** Finds the right name for the compilation of the nodes
    * @param nodes the set of nodes
    * @return the name
    */
    static String findName (Node[] nodes) {
        String name = ""; // NOI18N
        
        if (nodes.length > 0) {
            Node n = nodes[0];
            Node.Cookie ck = null;
            do {
                ck = n.getCookie(DataObject.class);
                if (ck != null) {
                    DataObject d = (DataObject)ck;
                    name = d.isValid() ? d.getNodeDelegate().getDisplayName() : ""; // NOI18N
                    break;
                }
                n = n.getParentNode();
            } while (n != null);
        }
        return NbBundle.getMessage(Compiler.class, "FMT_Compile", new Integer(nodes.length), name);
    }

    /** Finds the right name for the compilation of the DataObjects
    * @param dataObjects the set of DataObjects
    * @return the name
    */
    static String findName (DataObject[] dataObjects) {
        return NbBundle.getMessage(Compiler.class, "FMT_Compile",
                            new Integer (dataObjects.length),
                            (dataObjects.length > 0 && dataObjects[0].isValid())
                                ? dataObjects[0].getNodeDelegate().getDisplayName()
                                : "" // NOI18N
                        );
    }

    /** Create a job for compilation over a set of cookies.
    * @param en enumeration of {@link CompilerCookie}
    * @param depth the requested depth
    * @return the compiler job
    */
    public static CompilerJob createJob (Enumeration en, Compiler.Depth depth) {
        CompilerJob job = new CompilerJob (depth);
        prepareJobFor (job, en, CompilerCookie.class, depth, null);
        return job;
    }
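
    // A sketch of using createJob directly, assuming an existing Enumeration
    // "en" of CompilerCookie instances:
    //
    //   CompilerJob job = AbstractCompileAction.createJob(en, Compiler.DEPTH_ZERO);
    //   job.setDisplayName("My files");
    //   CompilerTask task = job.start();
    //   boolean ok = task.isSuccessful();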

    /** Compile a number of files.
    * Should actually be files, not directories (i.e. a zero depth will be used).
    * @param compileCookies enumeration of {@link CompilerCookie}
    * @param name name of the job to use
    * @return true if compilation was successful, false if there was some sort of error
    */
    public static boolean compile(Enumeration compileCookies, String name) {
        CompilerJob job = createJob(compileCookies, Compiler.DEPTH_ZERO);
        job.setDisplayName(name);
        if (! job.isUpToDate()) {
            CompilerTask task = job.start();
            return task.isSuccessful();
        } else {
            return true;
        }
    }
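
    // A hypothetical use of compile, turning a java.util.Collection named
    // "cookies" (holding CompilerCookie.Compile instances) into the
    // required Enumeration:
    //
    //   boolean ok = AbstractCompileAction.compile(
    //       Collections.enumeration(cookies), "My files");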
    
    private static Node.Cookie findCookie(DataObject o, Class cookie) {
        if (!CompilerCookie.class.isAssignableFrom(cookie)) throw new ClassCastException(cookie.getName());
        Node.Cookie c = o.getCookie(cookie);
        if (c != null) return c;
        if (!(o instanceof DataObject.Container)) return null;
        if (!cookie.isAssignableFrom(Cmp.class)) return null;
        return new Cmp((DataObject.Container)o, cookie);
    }
    
    private static Node.Cookie findCookie(Node n, Class cookie) {
        if (!CompilerCookie.class.isAssignableFrom(cookie)) throw new ClassCastException(cookie.getName());
        Node.Cookie c = n.getCookie(cookie);
        if (c != null) return c;
        DataObject o = (DataObject)n.getCookie(DataObject.class);
        if (o == null) return null;
        return findCookie(o, cookie);
    }

    /** The compiler cookie for the folder.
     * Do not rename without consulting reference in DataFolder.getCookie.
     */
    private static final class Cmp extends Object
    implements CompilerCookie.Compile, CompilerCookie.Build, CompilerCookie.Clean {
        private DataObject.Container folder;
        /** which cookie the compiler needs */
        private Class cookieClass;

        /* Creates new compiler for the given folder. */
        public Cmp (DataObject.Container folder, Class cookieClass) {
            this.folder = folder;
            this.cookieClass = cookieClass;
        }

        /** Supports all depths.
        * @param depth the depth to test
        * @return true
        */
        public boolean isDepthSupported (Compiler.Depth depth) {
            return true;
        }

        /** A method that allows the cookie to add its compiler(s)
        * into a compiler job. The depth parameter specifies whether
        * the cookie should continue with its children or not.
        * If depth.isLastDepth () is true then no children
        * should be processed. Otherwise the children are processed with
        * the new depth obtained by a call to depth.nextDepth ().
        *
        * @param job the compiler job to add the compiler for this cookie to
        * @param depth the depth to use for compilation
        *
        * @see org.openide.compiler.CompilerJob
        * @see org.openide.compiler.Compiler.Depth
        */
        public void addToJob (CompilerJob job, Compiler.Depth depth) {
            // do nothing if this is the last depth
            if (depth.isLastDepth ()) return;

            // add to the job children
            depth = depth.nextDepth ();

            // process all children on given depth
            prepareJobFor (job, new ArrayEnumeration(folder.getChildren()), cookieClass, depth, this);
        }

        /** Package method for CompilerSupport.prepareJob, to check whether I am the created
         * compiler for the given object or not.
         *
         * @param obj either data folder or data node of the folder
         * @return true if this object has been created just for the object
         */
        boolean wasCreatedFor (Object obj) {
            if (obj == folder) {
                return true;
            }

            if (obj == ((DataObject)folder).getNodeDelegate()) {
                return true;
            }

            return false;
        }

        public int hashCode () {
            return folder.hashCode ();
        }

        public boolean equals (Object o) {
            if (o instanceof Cmp) {
                Cmp c = (Cmp)o;
                return c.wasCreatedFor (folder);
            }
            return false;
        }

    } // end of Cmp
    
    /** Utility method to handle compilation of a given set of DataObjects.
     * Extracts compiler cookies from the provided enumeration of
     * DataObjects and adds all of them to the provided compiler job.
     *
     * @param job compiler job to add objects to
     * @param en enumeration of DataObject or Node objects
     * @param type class that should be requested as a cookie
     *   (CompilerCookie.Compile, CompilerCookie.Build,
     *   CompilerCookie.Clean)
     * @param depth the initial depth of compilation to start with
     * @since org.openide.compiler 1.0
     */
    public static void prepareJobFor(CompilerJob job, Enumeration en, Class type, Compiler.Depth depth) {
        prepareJobFor(job, en, type, depth, null);
    }
    
    /** Utility method to handle compilation of a given set of DataObjects.
     * Extracts compiler cookies from the provided enumeration of
     * DataObjects and adds all of them to the provided compiler job.
     * @param job compiler job to add objects to
     * @param en enumeration of DataObject or Node objects
     * @param type class that should be requested as a cookie
     *   (CompilerCookie.Compile, CompilerCookie.Build,
     *   CompilerCookie.Clean)
     * @param depth the initial depth of compilation to start with
     */
    private static void prepareJobFor (
        org.openide.compiler.CompilerJob job, Enumeration en, Class type, Compiler.Depth depth, Object forObj
    ) {
        // JST: Well, the method also handles the case when en is an
        // enumeration of nodes; AbstractCompileAction depends on this, but I
        // do not want to state it in the public documentation, IMHO it would
        // just mix apples and bananas...

        HashSet processedObjects = new HashSet (37);
        if (forObj != null) {
            processedObjects.add (forObj);
        }

        Enumeration cookies = new FilterEnumeration (
            enumerateForDepth (type, en, depth, processedObjects)
        );
        while (cookies.hasMoreElements ()) {
            CompilerCookie cc = (CompilerCookie)cookies.nextElement ();
            cc.addToJob (job, depth);
        }
    }
    
    /** Enumerates nodes for the given depth; that is, if an object is a
     * DataObject.Container and the depth is not yet exhausted, the
     * container's children are added to the compilation as well.
     */
    private static Enumeration enumerateForDepth(
        final Class cookie, Enumeration nodesOrDataObjects, final Compiler.Depth depth, HashSet processedObjects
    ) {
        // enumeration of enumerations of Node or DataObject
        QueueEnumeration queue = new QueueEnumeration ();
        queue.put (
            processArray (nodesOrDataObjects, cookie, queue, depth, processedObjects)
        );
        
        return new SequenceEnumeration (queue);
    }
    
    /** Processes an enumeration of Nodes or DataObjects at the given depth.
     * @param en enumeration of DataObjects or Nodes
     * @param cookie the cookie we need in the result
     * @param queue enumeration of enumerations of DataObjects or Nodes; when
     *    recursing, further objects are added into this queue
     * @param depth the depth all objects in the enumeration are at
     */
    private static Enumeration processArray(Enumeration en, final Class cookie, final QueueEnumeration queue, final Compiler.Depth depth, final HashSet processedObjects) {
        return new AlterEnumeration (en) {
            public Object alter (Object o) {
                return processObject (o, cookie, queue, depth, processedObjects);
            }
        };
    }
    
    /** Processes a single DataObject or Node and extracts its compiler cookie.
     * @param obj a DataObject or a Node
     * @param cookie which kind of cookie should be requested
     * @param queue enumeration of DataObjects and Nodes to add children to
     * @param depth process everything at the given depth
     * @return the compiler cookie, or null
     */
    private static org.openide.cookies.CompilerCookie processObject(
        Object obj, final Class cookie, final QueueEnumeration queue, final Compiler.Depth depth, final HashSet processedObjects
    ) {
        if (processedObjects.contains (obj)) {
            return null;
        }
        
        CompilerCookie c = (CompilerCookie)resolveCookie (obj, cookie);

        if (c != obj) {
            // mark the object as processed only if it is not the same as the
            // cookie; otherwise obj implements the cookie directly, and the
            // cookie will be processed a few lines below...
            processedObjects.add (obj);
        }

        // JST: How to code this? We do not want to use the DataFolder.Cmp
        // cookie, but only when it is provided by a regular DataFolder, not
        // an overridden one...
        if (c instanceof Cmp) {
            Cmp fc = (Cmp)c;

            if (fc.wasCreatedFor (obj)) {
                // if the compiler was created for the obj (DataFolder or
                // its node) then do not use it...
                c = null;
            }
        }
            
      
        if (c != null) {
            // mark the cookie as processed...
            if (processedObjects.contains (c)) {
                return null;
            } else {
                processedObjects.add (c);
                return c;
            }
        } else {
            // check for a container cookie, and in such a case
            // process all contained objects
            if (!depth.isLastDepth ()) {
                DataObject.Container cont = (DataObject.Container)resolveCookie (
                    obj, DataObject.Container.class
                );
                if (cont != null) {
                    DataObject[] arr = cont.getChildren ();
                    Enumeration en = new ArrayEnumeration (arr);
                    queue.put (processArray (
                        en, cookie, queue, depth.nextDepth (), processedObjects
                    ));
                }
            }
            return null;
        }
    }
    
    /** General purpose method that looks for a cookie in an object.
     * @param obj Node or DataObject or a Node.Cookie itself.
     */
    private static Object resolveCookie(Object obj, Class cookie) {
        Object res = null;

        if (obj instanceof DataObject) {
            res = ((DataObject)obj).getCookie (cookie);
        } else if (obj instanceof Node) {
            // it is a node
            res = ((Node)obj).getCookie (cookie);
        } else if (cookie.isInstance (obj)) {
            // the object is the cookie itself
            res = obj;
        }

        return res;
    
}
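
An example subclass

As the javadoc at the top of the class notes, a concrete subclass only has to choose a compilation depth and a cookie class. Below is a minimal sketch of what such a subclass might look like, using the Compiler.DEPTH_ZERO and CompilerCookie.Compile values that appear in the listing above. The class name CompileOnlyAction and its display name are made up for illustration; getName and getHelpCtx are the usual SystemAction requirements.

import org.openide.actions.AbstractCompileAction;
import org.openide.compiler.Compiler;
import org.openide.cookies.CompilerCookie;
import org.openide.util.HelpCtx;

public class CompileOnlyAction extends AbstractCompileAction {

    /** Non-recursive: compile only the selected files themselves. */
    protected Compiler.Depth depth() {
        return Compiler.DEPTH_ZERO;
    }

    /** Ask each node for the plain "compile" cookie (vs. Build or Clean). */
    protected Class cookie() {
        return CompilerCookie.Compile.class;
    }

    /** Display name; a real action would read this from an NbBundle. */
    public String getName() {
        return "Compile Selection";
    }

    public HelpCtx getHelpCtx() {
        return HelpCtx.DEFAULT_HELP;
    }
}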