├── README.md ├── ivy.xml ├── ivy ├── hadoop-client-pom-template.xml ├── hadoop-core-pom-template.xml ├── hadoop-core.pom ├── hadoop-examples-pom-template.xml ├── hadoop-minicluster-pom-template.xml ├── hadoop-streaming-pom-template.xml ├── hadoop-test-pom-template.xml ├── hadoop-tools-pom-template.xml ├── ivy-2.1.0.jar ├── ivysettings.xml └── libraries.properties ├── release ├── hadoop-eclipse-kepler-plugin-2.2.0.jar ├── hadoop-eclipse-kepler-plugin-2.4.1.jar └── hadoop-eclipse-plugin-2.6.0.jar └── src ├── contrib ├── build-contrib.xml ├── build.xml └── eclipse-plugin │ ├── .project │ ├── .settings │ ├── org.eclipse.jdt.core.prefs │ ├── org.eclipse.jdt.ui.prefs │ └── org.eclipse.wst.validation.prefs │ ├── META-INF │ └── MANIFEST.MF │ ├── build.properties │ ├── build.xml │ ├── build.xml.bak │ ├── ivy.xml │ ├── ivy │ └── libraries.properties │ ├── makePlus.sh │ ├── plugin.xml │ ├── resources │ ├── Components │ │ ├── Conf.png │ │ ├── Export.png │ │ ├── Import.png │ │ ├── New.png │ │ ├── Reload.png │ │ ├── Tool.png │ │ └── Tools.png │ ├── ConnectDFS.xml │ ├── CreateProj.xml │ ├── Elephant-16x16.png │ ├── Elephant-24x24.png │ ├── Elephant-32x32.png │ ├── Elephant-64x64.png │ ├── Elephant-small-16x16.png │ ├── Elephant.jpg │ ├── Elephant100x100.gif │ ├── Elephant16x16.gif │ ├── Elephant2-136x136.png │ ├── Elephant2-16x16.png │ ├── Elephant2-24x24.png │ ├── Elephant2-32x32.png │ ├── Elephant2-64x64.png │ ├── Elephant2.jpg │ ├── Elephant3-122x122.png │ ├── Elephant3-16x16.png │ ├── Elephant3-24x24.png │ ├── HelloWorld.xml │ ├── MAP100x100.gif │ ├── MAP16x15.gif │ ├── RunProj.xml │ ├── SetHadoopPath.xml │ ├── Setup.xml │ ├── download.png │ ├── drive100x100.gif │ ├── drive16x16.gif │ ├── driver.png │ ├── driverwiz.png │ ├── elephantblue16x16.gif │ ├── files.gif │ ├── hadoop-logo-16x16.png │ ├── hadoop-logo-24x24.png │ ├── hadoop-logo-85x85.png │ ├── hadoop-logo.jpg │ ├── hadoop.gif │ ├── hadoop_small.gif │ ├── job.gif │ ├── location-edit-16x16.png │ ├── 
location-new-16x16.png │ ├── map16x16.gif │ ├── mapper16.png │ ├── mapwiz.png │ ├── new-folder.png │ ├── projwiz.png │ ├── reduce100x100.gif │ ├── reduce16x16.gif │ ├── reducer-16x16.gif │ ├── reducer16.png │ ├── reducewiz.png │ ├── refresh.png │ ├── spite_overcloud.png │ ├── spitesmall.gif │ ├── spitesmall.png │ └── upload.png │ └── src │ └── java │ └── org │ └── apache │ └── hadoop │ └── eclipse │ ├── Activator.java │ ├── ErrorMessageDialog.java │ ├── HadoopPerspectiveFactory.java │ ├── ImageLibrary.java │ ├── MapReduceNature.java │ ├── NewDriverWizard.java │ ├── NewDriverWizardPage.java │ ├── NewMapReduceProjectWizard.java │ ├── NewMapperWizard.java │ ├── NewReducerWizard.java │ ├── PropertyTester.java │ ├── actions │ ├── DFSActionImpl.java │ ├── EditLocationAction.java │ ├── NewLocationAction.java │ ├── OpenNewMRClassWizardAction.java │ └── OpenNewMRProjectAction.java │ ├── dfs │ ├── ActionProvider.java │ ├── DFSActions.java │ ├── DFSContent.java │ ├── DFSContentProvider.java │ ├── DFSFile.java │ ├── DFSFolder.java │ ├── DFSLocation.java │ ├── DFSLocationsRoot.java │ ├── DFSMessage.java │ └── DFSPath.java │ ├── launch │ ├── HadoopApplicationLaunchShortcut.java │ ├── LocalMapReduceLaunchTabGroup.java │ ├── MutexRule.java │ └── StartHadoopLaunchTabGroup.java │ ├── preferences │ ├── MapReducePreferencePage.java │ ├── PreferenceConstants.java │ └── PreferenceInitializer.java │ ├── server │ ├── ConfProp.java │ ├── HadoopJob.java │ ├── HadoopPathPage.java │ ├── HadoopServer.java │ ├── IJobListener.java │ └── JarModule.java │ ├── servers │ ├── HadoopLocationWizard.java │ ├── HadoopServerSelectionListContentProvider.java │ ├── IHadoopServerListener.java │ ├── RunOnHadoopWizard.java │ └── ServerRegistry.java │ └── view │ └── servers │ └── ServerView.java ├── ivy.xml └── ivy ├── hadoop-client-pom-template.xml ├── hadoop-core-pom-template.xml ├── hadoop-core.pom ├── hadoop-examples-pom-template.xml ├── hadoop-minicluster-pom-template.xml ├── 
hadoop-streaming-pom-template.xml ├── hadoop-test-pom-template.xml ├── hadoop-tools-pom-template.xml ├── ivysettings.xml └── libraries.properties /README.md: -------------------------------------------------------------------------------- 1 | hadoop2x-eclipse-plugin 2 | ======================= 3 | 4 | eclipse plugin for hadoop 2.x.x 5 | 6 | 7 | How to build 8 | ---------------------------------------- 9 | 10 | [hdpusr@demo hadoop2x-eclipse-plugin]$ cd src/contrib/eclipse-plugin 11 | 12 | # Assume hadoop installation directory is /usr/share/hadoop 13 | 14 | [hdpusr@apclt eclipse-plugin]$ ant jar -Dversion=2.4.1 -Dhadoop.version=2.4.1 -Declipse.home=/opt/eclipse -Dhadoop.home=/usr/share/hadoop 15 | 16 | final jar will be generated at directory 17 | 18 | ${hadoop2x-eclipse-plugin}/build/contrib/eclipse-plugin/hadoop-eclipse-plugin-2.4.1.jar 19 | 20 | 21 | release version included 22 | ------------------------------------- 23 | 24 | release/hadoop-eclipse-kepler-plugin-2.4.1.jar # not tested yet 25 | 26 | release/hadoop-eclipse-kepler-plugin-2.2.0.jar 27 | 28 | 29 | options required 30 | -------------------------------------- 31 | version: plugin version 32 | 33 | hadoop.version: hadoop version you want to compiled with 34 | 35 | eclipse.home: path of eclipse home 36 | 37 | hadoop.home: path of hadoop 2.x home 38 | 39 | 40 | 41 | How to debug 42 | -------------------------------------- 43 | start eclipse with debug parameter: 44 | 45 | /opt/eclipse/eclipse -clean -consolelog -debug 46 | 47 | 48 | Note: compile issues resolve: 49 | -------------------------------------- 50 | 1. For different hadoop, adjust ${hadoop2x-eclipse-plugin-master}/ivy/libraries.properties, to match hadoop dependency lib version. 51 | 1. 
modify ${hadoop2x-eclipse-plugin}/src/contrib/eclipse-plugin/build.xml, in the node: 2 | 15 | 17 | 4.0.0 18 | 19 | org.apache 20 | apache 21 | 9 22 | 23 | org.apache.hadoop 24 | hadoop-client 25 | @version 26 | jar 27 | 28 | Apache Hadoop Client 29 | Apache Hadoop Client 30 | 31 | 32 | 33 | org.apache.hadoop 34 | hadoop-core 35 | @version 36 | compile 37 | 38 | 39 | commons-cli 40 | commons-cli 41 | 42 | 43 | commons-httpclient 44 | commons-httpclient 45 | 46 | 47 | tomcat 48 | jasper-compiler 49 | 50 | 51 | tomcat 52 | jasper-runtime 53 | 54 | 55 | javax.servlet 56 | servlet-api 57 | 58 | 59 | javax.servlet.jsp 60 | jsp-api 61 | 62 | 63 | jetty 64 | org.mortbay.jetty 65 | 66 | 67 | org.mortbay.jetty 68 | jetty 69 | 70 | 71 | org.mortbay.jetty 72 | jetty-util 73 | 74 | 75 | org.mortbay.jetty 76 | jsp-2.1 77 | 78 | 79 | org.mortbay.jetty 80 | jsp-api-2.1 81 | 82 | 83 | org.mortbay.jetty 84 | servlet-api-2.5 85 | 86 | 87 | net.sf.kosmosfs 88 | kfs 89 | 90 | 91 | net.java.dev.jets3t 92 | jets3t 93 | 94 | 95 | org.eclipse.jdt 96 | core 97 | 98 | 99 | 100 | 101 | 102 | 103 | 104 | -------------------------------------------------------------------------------- /ivy/hadoop-core-pom-template.xml: -------------------------------------------------------------------------------- 1 | 2 | 18 | 20 | 4.0.0 21 | org.apache.hadoop 22 | hadoop-core 23 | jar 24 | @version 25 | 26 | 27 | commons-cli 28 | commons-cli 29 | 1.2 30 | 31 | 32 | xmlenc 33 | xmlenc 34 | 0.52 35 | 36 | 37 | com.sun.jersey 38 | jersey-core 39 | 1.8 40 | 41 | 42 | com.sun.jersey 43 | jersey-json 44 | 1.8 45 | 46 | 47 | com.sun.jersey 48 | jersey-server 49 | 1.8 50 | 51 | 52 | commons-io 53 | commons-io 54 | 2.1 55 | 56 | 57 | commons-httpclient 58 | commons-httpclient 59 | 3.0.1 60 | 61 | 62 | commons-codec 63 | commons-codec 64 | 1.4 65 | 66 | 67 | org.apache.commons 68 | commons-math 69 | 2.1 70 | 71 | 72 | commons-configuration 73 | commons-configuration 74 | 1.6 75 | 76 | 77 | commons-net 78 | commons-net 
79 | 1.4.1 80 | 81 | 82 | org.mortbay.jetty 83 | jetty 84 | 6.1.26 85 | 86 | 87 | org.mortbay.jetty 88 | jetty-util 89 | 6.1.26 90 | 91 | 92 | tomcat 93 | jasper-runtime 94 | 5.5.12 95 | 96 | 97 | tomcat 98 | jasper-compiler 99 | 5.5.12 100 | 101 | 102 | org.mortbay.jetty 103 | jsp-api-2.1 104 | 6.1.14 105 | 106 | 107 | org.mortbay.jetty 108 | jsp-2.1 109 | 6.1.14 110 | 111 | 112 | commons-el 113 | commons-el 114 | 1.0 115 | 116 | 117 | net.java.dev.jets3t 118 | jets3t 119 | 0.6.1 120 | 121 | 122 | hsqldb 123 | hsqldb 124 | 1.8.0.10 125 | 126 | 127 | oro 128 | oro 129 | 2.0.8 130 | 131 | 132 | org.eclipse.jdt 133 | core 134 | 3.1.1 135 | 136 | 137 | org.codehaus.jackson 138 | jackson-mapper-asl 139 | 1.8.8 140 | 141 | 142 | 143 | -------------------------------------------------------------------------------- /ivy/hadoop-examples-pom-template.xml: -------------------------------------------------------------------------------- 1 | 2 | 18 | 19 | 21 | 22 | 4.0.0 23 | org.apache.hadoop 24 | hadoop-examples 25 | jar 26 | @version 27 | 28 | 29 | org.apache.hadoop 30 | hadoop-core 31 | @version 32 | 33 | 34 | 35 | -------------------------------------------------------------------------------- /ivy/hadoop-minicluster-pom-template.xml: -------------------------------------------------------------------------------- 1 | 2 | 15 | 17 | 4.0.0 18 | 19 | org.apache 20 | apache 21 | 9 22 | 23 | org.apache.hadoop 24 | hadoop-minicluster 25 | @version 26 | jar 27 | 28 | Apache Hadoop Mini-Cluster 29 | Apache Hadoop Mini-Cluster 30 | 31 | 32 | 33 | org.apache.hadoop 34 | hadoop-core 35 | @version 36 | compile 37 | 38 | 39 | org.apache.hadoop 40 | hadoop-test 41 | @version 42 | compile 43 | 44 | 45 | com.sun.jersey 46 | jersey-server 47 | 1.0 48 | compile 49 | 50 | 51 | 52 | 53 | 54 | -------------------------------------------------------------------------------- /ivy/hadoop-streaming-pom-template.xml: -------------------------------------------------------------------------------- 
1 | 2 | 18 | 19 | 21 | 22 | 4.0.0 23 | org.apache.hadoop 24 | hadoop-streaming 25 | jar 26 | @version 27 | 28 | 29 | org.apache.hadoop 30 | hadoop-core 31 | @version 32 | 33 | 34 | 35 | -------------------------------------------------------------------------------- /ivy/hadoop-test-pom-template.xml: -------------------------------------------------------------------------------- 1 | 2 | 18 | 19 | 21 | 4.0.0 22 | org.apache.hadoop 23 | hadoop-test 24 | jar 25 | @version 26 | 27 | 28 | org.apache.hadoop 29 | hadoop-core 30 | @version 31 | 32 | 33 | org.apache.ftpserver 34 | ftplet-api 35 | 1.0.0 36 | 37 | 38 | org.apache.mina 39 | mina-core 40 | 2.0.0-M5 41 | 42 | 43 | org.apache.ftpserver 44 | ftpserver-core 45 | 1.0.0 46 | 47 | 48 | org.apache.ftpserver 49 | ftpserver-deprecated 50 | 1.0.0-M2 51 | 52 | 53 | 54 | -------------------------------------------------------------------------------- /ivy/hadoop-tools-pom-template.xml: -------------------------------------------------------------------------------- 1 | 2 | 18 | 19 | 21 | 22 | 4.0.0 23 | org.apache.hadoop 24 | hadoop-tools 25 | jar 26 | @version 27 | 28 | 29 | org.apache.hadoop 30 | hadoop-core 31 | @version 32 | 33 | 34 | 35 | -------------------------------------------------------------------------------- /ivy/ivy-2.1.0.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/winghc/hadoop2x-eclipse-plugin/a2e00ed4eb4089c28509ac81ca7ac385c32e265c/ivy/ivy-2.1.0.jar -------------------------------------------------------------------------------- /ivy/ivysettings.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 19 | 20 | 23 | 32 | 35 | 38 | 41 | 43 | 45 | 46 | 47 | 48 | 49 | 54 | 59 | 64 | 65 | 66 | 67 | 68 | 69 | 70 | 71 | 72 | 73 | 74 | 75 | 76 | 77 | 78 | 79 | 80 | 81 | 86 | 87 | 88 | 89 | 90 | 91 | 92 | 93 | -------------------------------------------------------------------------------- 
/ivy/libraries.properties: -------------------------------------------------------------------------------- 1 | # Licensed under the Apache License, Version 2.0 (the "License"); 2 | # you may not use this file except in compliance with the License. 3 | # You may obtain a copy of the License at 4 | # 5 | # http://www.apache.org/licenses/LICENSE-2.0 6 | # 7 | # Unless required by applicable law or agreed to in writing, software 8 | # distributed under the License is distributed on an "AS IS" BASIS, 9 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 10 | # See the License for the specific language governing permissions and 11 | # limitations under the License. 12 | 13 | #This properties file lists the versions of the various artifacts used by hadoop and components. 14 | #It drives ivy and the generation of a maven POM 15 | 16 | # This is the version of hadoop we are generating 17 | hadoop.version=2.6.0 18 | hadoop-gpl-compression.version=0.1.0 19 | 20 | #These are the versions of our dependencies (in alphabetical order) 21 | apacheant.version=1.7.0 22 | ant-task.version=2.0.10 23 | 24 | asm.version=3.2 25 | aspectj.version=1.6.5 26 | aspectj.version=1.6.11 27 | 28 | checkstyle.version=4.2 29 | 30 | commons-cli.version=1.2 31 | commons-codec.version=1.4 32 | commons-collections.version=3.2.1 33 | commons-configuration.version=1.6 34 | commons-daemon.version=1.0.13 35 | commons-httpclient.version=3.0.1 36 | commons-lang.version=2.6 37 | commons-logging.version=1.0.4 38 | commons-logging-api.version=1.0.4 39 | commons-math.version=2.1 40 | commons-el.version=1.0 41 | commons-fileupload.version=1.2 42 | commons-io.version=2.1 43 | commons-net.version=3.1 44 | core.version=3.1.1 45 | coreplugin.version=1.3.2 46 | 47 | hsqldb.version=1.8.0.10 48 | htrace.version=3.0.4 49 | 50 | ivy.version=2.1.0 51 | 52 | jasper.version=5.5.12 53 | jackson.version=1.9.13 54 | #not able to figureout the version of jsp & jsp-api version to get it resolved throught 
ivy 55 | # but still declared here as we are going to have a local copy from the lib folder 56 | jsp.version=2.1 57 | jsp-api.version=5.5.12 58 | jsp-api-2.1.version=6.1.14 59 | jsp-2.1.version=6.1.14 60 | jets3t.version=0.6.1 61 | jetty.version=6.1.26 62 | jetty-util.version=6.1.26 63 | jersey-core.version=1.8 64 | jersey-json.version=1.8 65 | jersey-server.version=1.8 66 | junit.version=4.5 67 | jdeb.version=0.8 68 | jdiff.version=1.0.9 69 | json.version=1.0 70 | 71 | kfs.version=0.1 72 | 73 | log4j.version=1.2.17 74 | lucene-core.version=2.3.1 75 | 76 | mockito-all.version=1.8.5 77 | jsch.version=0.1.42 78 | 79 | oro.version=2.0.8 80 | 81 | rats-lib.version=0.5.1 82 | 83 | servlet.version=4.0.6 84 | servlet-api.version=2.5 85 | slf4j-api.version=1.7.5 86 | slf4j-log4j12.version=1.7.5 87 | 88 | wagon-http.version=1.0-beta-2 89 | xmlenc.version=0.52 90 | xerces.version=1.4.4 91 | 92 | protobuf.version=2.5.0 93 | guava.version=11.0.2 94 | netty.version=3.6.2.Final 95 | -------------------------------------------------------------------------------- /release/hadoop-eclipse-kepler-plugin-2.2.0.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/winghc/hadoop2x-eclipse-plugin/a2e00ed4eb4089c28509ac81ca7ac385c32e265c/release/hadoop-eclipse-kepler-plugin-2.2.0.jar -------------------------------------------------------------------------------- /release/hadoop-eclipse-kepler-plugin-2.4.1.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/winghc/hadoop2x-eclipse-plugin/a2e00ed4eb4089c28509ac81ca7ac385c32e265c/release/hadoop-eclipse-kepler-plugin-2.4.1.jar -------------------------------------------------------------------------------- /release/hadoop-eclipse-plugin-2.6.0.jar: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/winghc/hadoop2x-eclipse-plugin/a2e00ed4eb4089c28509ac81ca7ac385c32e265c/release/hadoop-eclipse-plugin-2.6.0.jar -------------------------------------------------------------------------------- /src/contrib/build.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | 29 | 30 | 31 | 32 | 33 | 34 | 35 | 36 | 37 | 38 | 39 | 40 | 41 | 42 | 43 | 44 | 45 | 46 | 47 | 48 | 49 | 50 | 51 | 52 | 53 | 54 | 55 | 56 | 57 | 58 | 59 | 60 | Tests failed! 61 | 62 | 63 | 64 | 65 | 66 | 67 | 68 | 69 | 70 | 71 | 72 | 73 | 74 | 75 | 77 | 78 | 79 | 80 | 81 | 82 | 83 | 84 | Tests failed! 85 | 86 | 87 | 88 | 89 | 90 | 91 | 92 | 93 | 94 | 95 | 96 | 97 | -------------------------------------------------------------------------------- /src/contrib/eclipse-plugin/.project: -------------------------------------------------------------------------------- 1 | 2 | 3 | MapReduceTools 4 | 5 | 6 | 7 | 8 | 9 | org.eclipse.jdt.core.javabuilder 10 | 11 | 12 | 13 | 14 | org.eclipse.pde.ManifestBuilder 15 | 16 | 17 | 18 | 19 | org.eclipse.pde.SchemaBuilder 20 | 21 | 22 | 23 | 24 | 25 | org.eclipse.pde.PluginNature 26 | org.eclipse.jdt.core.javanature 27 | 28 | 29 | -------------------------------------------------------------------------------- /src/contrib/eclipse-plugin/.settings/org.eclipse.jdt.ui.prefs: -------------------------------------------------------------------------------- 1 | #Tue Aug 14 19:41:15 PDT 2007 2 | eclipse.preferences.version=1 3 | formatter_profile=_Lucene 4 | formatter_settings_version=11 5 | instance/org.eclipse.core.net/org.eclipse.core.net.hasMigrated=true 6 | org.eclipse.jdt.ui.text.custom_code_templates= 7 | -------------------------------------------------------------------------------- /src/contrib/eclipse-plugin/.settings/org.eclipse.wst.validation.prefs: -------------------------------------------------------------------------------- 1 | #Tue 
Aug 14 19:41:15 PDT 2007 2 | DELEGATES_PREFERENCE=delegateValidatorListorg.eclipse.wst.xsd.core.internal.validation.eclipse.XSDDelegatingValidator\=org.eclipse.wst.xsd.core.internal.validation.eclipse.Validator;org.eclipse.wst.wsdl.validation.internal.eclipse.WSDLDelegatingValidator\=org.eclipse.wst.wsdl.validation.internal.eclipse.Validator; 3 | USER_BUILD_PREFERENCE=enabledBuildValidatorListorg.eclipse.wst.xsd.core.internal.validation.eclipse.XSDDelegatingValidator;org.eclipse.jst.jsp.core.internal.validation.JSPContentValidator;org.eclipse.wst.html.internal.validation.HTMLValidator;org.eclipse.wst.xml.core.internal.validation.eclipse.Validator;org.eclipse.jst.jsf.validation.internal.appconfig.AppConfigValidator;org.eclipse.jst.jsp.core.internal.validation.JSPBatchValidator;org.eclipse.wst.dtd.core.internal.validation.eclipse.Validator;org.eclipse.wst.wsi.ui.internal.WSIMessageValidator;org.eclipse.wst.wsdl.validation.internal.eclipse.WSDLDelegatingValidator;org.eclipse.jst.jsf.validation.internal.JSPSemanticsValidator; 4 | USER_MANUAL_PREFERENCE=enabledManualValidatorListorg.eclipse.wst.xsd.core.internal.validation.eclipse.XSDDelegatingValidator;org.eclipse.jst.jsp.core.internal.validation.JSPContentValidator;org.eclipse.wst.html.internal.validation.HTMLValidator;org.eclipse.wst.xml.core.internal.validation.eclipse.Validator;org.eclipse.jst.jsf.validation.internal.appconfig.AppConfigValidator;org.eclipse.jst.jsp.core.internal.validation.JSPBatchValidator;org.eclipse.wst.dtd.core.internal.validation.eclipse.Validator;org.eclipse.wst.wsi.ui.internal.WSIMessageValidator;org.eclipse.wst.wsdl.validation.internal.eclipse.WSDLDelegatingValidator;org.eclipse.jst.jsf.validation.internal.JSPSemanticsValidator; 5 | USER_PREFERENCE=overrideGlobalPreferencesfalse 6 | eclipse.preferences.version=1 7 | -------------------------------------------------------------------------------- /src/contrib/eclipse-plugin/META-INF/MANIFEST.MF: 
-------------------------------------------------------------------------------- 1 | Manifest-Version: 1.0 2 | Bundle-ManifestVersion: 2 3 | Bundle-Name: MapReduce Tools for Eclipse 4 | Bundle-SymbolicName: org.apache.hadoop.eclipse;singleton:=true 5 | Bundle-Version: 0.18 6 | Bundle-Activator: org.apache.hadoop.eclipse.Activator 7 | Bundle-Localization: plugin 8 | Require-Bundle: org.eclipse.ui, 9 | org.eclipse.core.runtime, 10 | org.eclipse.jdt.launching, 11 | org.eclipse.debug.core, 12 | org.eclipse.jdt, 13 | org.eclipse.jdt.core, 14 | org.eclipse.core.resources, 15 | org.eclipse.ui.ide, 16 | org.eclipse.jdt.ui, 17 | org.eclipse.debug.ui, 18 | org.eclipse.jdt.debug.ui, 19 | org.eclipse.core.expressions, 20 | org.eclipse.ui.cheatsheets, 21 | org.eclipse.ui.console, 22 | org.eclipse.ui.navigator, 23 | org.eclipse.core.filesystem, 24 | org.apache.commons.logging 25 | Eclipse-LazyStart: true 26 | Bundle-Vendor: Apache Hadoop 27 | -------------------------------------------------------------------------------- /src/contrib/eclipse-plugin/build.properties: -------------------------------------------------------------------------------- 1 | # Licensed under the Apache License, Version 2.0 (the "License"); 2 | # you may not use this file except in compliance with the License. 3 | # You may obtain a copy of the License at 4 | # 5 | # http://www.apache.org/licenses/LICENSE-2.0 6 | # 7 | # Unless required by applicable law or agreed to in writing, software 8 | # distributed under the License is distributed on an "AS IS" BASIS, 9 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 10 | # See the License for the specific language governing permissions and 11 | # limitations under the License. 12 | 13 | 14 | output.. 
= bin/ 15 | bin.includes = META-INF/,\ 16 | plugin.xml,\ 17 | resources/,\ 18 | classes/,\ 19 | classes/,\ 20 | lib/ 21 | -------------------------------------------------------------------------------- /src/contrib/eclipse-plugin/build.xml.bak: -------------------------------------------------------------------------------- 1 | 2 | 3 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | 29 | 30 | 31 | 32 | 33 | 34 | 35 | 36 | 37 | 38 | 39 | 40 | 41 | 42 | 43 | 44 | 45 | 46 | 47 | 48 | 49 | 50 | 51 | 52 | 53 | 54 | 55 | 62 | 63 | 64 | 65 | 66 | 67 | 68 | 69 | 70 | 71 | 74 | 75 | 76 | 77 | 78 | 79 | 80 | -------------------------------------------------------------------------------- /src/contrib/eclipse-plugin/ivy.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | Apache Hadoop 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | 31 | 32 | 36 | 37 | 41 | 42 | 46 | 47 | 51 | 52 | 53 | 54 | 58 | 62 | 63 | 64 | -------------------------------------------------------------------------------- /src/contrib/eclipse-plugin/ivy/libraries.properties: -------------------------------------------------------------------------------- 1 | # Licensed under the Apache License, Version 2.0 (the "License"); 2 | # you may not use this file except in compliance with the License. 3 | # You may obtain a copy of the License at 4 | # 5 | # http://www.apache.org/licenses/LICENSE-2.0 6 | # 7 | # Unless required by applicable law or agreed to in writing, software 8 | # distributed under the License is distributed on an "AS IS" BASIS, 9 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 10 | # See the License for the specific language governing permissions and 11 | # limitations under the License. 12 | 13 | 14 | #This properties file lists the versions of the various artifacts used by streaming. 
15 | #It drives ivy and the generation of a maven POM 16 | 17 | #Please list the dependencies name with version if they are different from the ones 18 | #listed in the global libraries.properties file (in alphabetical order) 19 | -------------------------------------------------------------------------------- /src/contrib/eclipse-plugin/makePlus.sh: -------------------------------------------------------------------------------- 1 | ant jar -Dversion=2.0.4 -Declipse.home=/opt/eclipse -Dhadoop.home=/usr/share/hadoop 2 | -------------------------------------------------------------------------------- /src/contrib/eclipse-plugin/resources/Components/Conf.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/winghc/hadoop2x-eclipse-plugin/a2e00ed4eb4089c28509ac81ca7ac385c32e265c/src/contrib/eclipse-plugin/resources/Components/Conf.png -------------------------------------------------------------------------------- /src/contrib/eclipse-plugin/resources/Components/Export.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/winghc/hadoop2x-eclipse-plugin/a2e00ed4eb4089c28509ac81ca7ac385c32e265c/src/contrib/eclipse-plugin/resources/Components/Export.png -------------------------------------------------------------------------------- /src/contrib/eclipse-plugin/resources/Components/Import.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/winghc/hadoop2x-eclipse-plugin/a2e00ed4eb4089c28509ac81ca7ac385c32e265c/src/contrib/eclipse-plugin/resources/Components/Import.png -------------------------------------------------------------------------------- /src/contrib/eclipse-plugin/resources/Components/New.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/winghc/hadoop2x-eclipse-plugin/a2e00ed4eb4089c28509ac81ca7ac385c32e265c/src/contrib/eclipse-plugin/resources/Components/New.png -------------------------------------------------------------------------------- /src/contrib/eclipse-plugin/resources/Components/Reload.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/winghc/hadoop2x-eclipse-plugin/a2e00ed4eb4089c28509ac81ca7ac385c32e265c/src/contrib/eclipse-plugin/resources/Components/Reload.png -------------------------------------------------------------------------------- /src/contrib/eclipse-plugin/resources/Components/Tool.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/winghc/hadoop2x-eclipse-plugin/a2e00ed4eb4089c28509ac81ca7ac385c32e265c/src/contrib/eclipse-plugin/resources/Components/Tool.png -------------------------------------------------------------------------------- /src/contrib/eclipse-plugin/resources/Components/Tools.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/winghc/hadoop2x-eclipse-plugin/a2e00ed4eb4089c28509ac81ca7ac385c32e265c/src/contrib/eclipse-plugin/resources/Components/Tools.png -------------------------------------------------------------------------------- /src/contrib/eclipse-plugin/resources/ConnectDFS.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | This tutorial informs you how to set the default Hadoop 7 | directory for the plugin. 8 | 9 | 10 | 11 | 12 | Define a MapReduce cluster [if you have not done so already] 13 | by opening the MapReduce Servers view and clicking on the 14 | blue elephant in the upper right. 
15 | 16 | Use the following embedded command to create a new Hadoop Server: 17 | 18 | 19 | 21 | 22 | 23 | 24 | 25 | Project Explorer view shows an elephant icon for each defined 26 | server. Opening a server entry will open a connection to 27 | the root of that server's DFS tree. You can then explore the 28 | DFS tree. 29 | 30 | 31 | 32 | 33 | -------------------------------------------------------------------------------- /src/contrib/eclipse-plugin/resources/CreateProj.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | This tutorial guides you through the creation of a simple 6 | MapReduce project with three MapReduce classes: a Mapper, a 7 | Reducer, and a Driver. 8 | 9 | 10 | 11 | 14 | 15 | Select Window->Open Perspective->MapReduce in the menubar at 16 | the top of the workbench. This step changes the perspective 17 | to set up the Eclipse workbench for MapReduce development. 18 | 19 | 20 | 21 | 23 | 24 | The first thing you will need is a MapReduce Project. If you 25 | already have a MapReduce project in your workspace that you 26 | would like to use, you may skip this step by clicking the 27 | "Click to Skip" button. If not, select File->New->Project 28 | and choose MapReduce Project in the list. Complete the 29 | subsequent pages as required. 30 | 31 | 32 | 33 | 35 | 36 | You should now have a MapReduce project in your workspace. 37 | The next thing to do is creating a package. Use the Eclipse 38 | tools by selecting File -> New ->Package action. Specify the 39 | source folder (the project containing the package). Then, 40 | give the package a name, such as "mapreduce.test", and click 41 | the "Finish" button. If you already have a project with a 42 | package you might as well skip this step. 43 | 44 | 45 | 46 | 47 | Now you should be set up for creating your MapReduce 48 | application. The MapReduce application consists of three 49 | classes: a Mapper class, a Reducer class and a Driver class. 
50 | In this step you will create the three classes. Use the 51 | class wizard by selecting File -> New -> Class. 52 | Repeat this for every class. 53 | 54 | 55 | 56 | 59 | 60 | 61 | 62 | 63 | -------------------------------------------------------------------------------- /src/contrib/eclipse-plugin/resources/Elephant-16x16.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/winghc/hadoop2x-eclipse-plugin/a2e00ed4eb4089c28509ac81ca7ac385c32e265c/src/contrib/eclipse-plugin/resources/Elephant-16x16.png -------------------------------------------------------------------------------- /src/contrib/eclipse-plugin/resources/Elephant-24x24.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/winghc/hadoop2x-eclipse-plugin/a2e00ed4eb4089c28509ac81ca7ac385c32e265c/src/contrib/eclipse-plugin/resources/Elephant-24x24.png -------------------------------------------------------------------------------- /src/contrib/eclipse-plugin/resources/Elephant-32x32.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/winghc/hadoop2x-eclipse-plugin/a2e00ed4eb4089c28509ac81ca7ac385c32e265c/src/contrib/eclipse-plugin/resources/Elephant-32x32.png -------------------------------------------------------------------------------- /src/contrib/eclipse-plugin/resources/Elephant-64x64.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/winghc/hadoop2x-eclipse-plugin/a2e00ed4eb4089c28509ac81ca7ac385c32e265c/src/contrib/eclipse-plugin/resources/Elephant-64x64.png -------------------------------------------------------------------------------- /src/contrib/eclipse-plugin/resources/Elephant-small-16x16.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/winghc/hadoop2x-eclipse-plugin/a2e00ed4eb4089c28509ac81ca7ac385c32e265c/src/contrib/eclipse-plugin/resources/Elephant-small-16x16.png -------------------------------------------------------------------------------- /src/contrib/eclipse-plugin/resources/Elephant.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/winghc/hadoop2x-eclipse-plugin/a2e00ed4eb4089c28509ac81ca7ac385c32e265c/src/contrib/eclipse-plugin/resources/Elephant.jpg -------------------------------------------------------------------------------- /src/contrib/eclipse-plugin/resources/Elephant100x100.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/winghc/hadoop2x-eclipse-plugin/a2e00ed4eb4089c28509ac81ca7ac385c32e265c/src/contrib/eclipse-plugin/resources/Elephant100x100.gif -------------------------------------------------------------------------------- /src/contrib/eclipse-plugin/resources/Elephant16x16.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/winghc/hadoop2x-eclipse-plugin/a2e00ed4eb4089c28509ac81ca7ac385c32e265c/src/contrib/eclipse-plugin/resources/Elephant16x16.gif -------------------------------------------------------------------------------- /src/contrib/eclipse-plugin/resources/Elephant2-136x136.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/winghc/hadoop2x-eclipse-plugin/a2e00ed4eb4089c28509ac81ca7ac385c32e265c/src/contrib/eclipse-plugin/resources/Elephant2-136x136.png -------------------------------------------------------------------------------- /src/contrib/eclipse-plugin/resources/Elephant2-16x16.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/winghc/hadoop2x-eclipse-plugin/a2e00ed4eb4089c28509ac81ca7ac385c32e265c/src/contrib/eclipse-plugin/resources/Elephant2-16x16.png -------------------------------------------------------------------------------- /src/contrib/eclipse-plugin/resources/Elephant2-24x24.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/winghc/hadoop2x-eclipse-plugin/a2e00ed4eb4089c28509ac81ca7ac385c32e265c/src/contrib/eclipse-plugin/resources/Elephant2-24x24.png -------------------------------------------------------------------------------- /src/contrib/eclipse-plugin/resources/Elephant2-32x32.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/winghc/hadoop2x-eclipse-plugin/a2e00ed4eb4089c28509ac81ca7ac385c32e265c/src/contrib/eclipse-plugin/resources/Elephant2-32x32.png -------------------------------------------------------------------------------- /src/contrib/eclipse-plugin/resources/Elephant2-64x64.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/winghc/hadoop2x-eclipse-plugin/a2e00ed4eb4089c28509ac81ca7ac385c32e265c/src/contrib/eclipse-plugin/resources/Elephant2-64x64.png -------------------------------------------------------------------------------- /src/contrib/eclipse-plugin/resources/Elephant2.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/winghc/hadoop2x-eclipse-plugin/a2e00ed4eb4089c28509ac81ca7ac385c32e265c/src/contrib/eclipse-plugin/resources/Elephant2.jpg -------------------------------------------------------------------------------- /src/contrib/eclipse-plugin/resources/Elephant3-122x122.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/winghc/hadoop2x-eclipse-plugin/a2e00ed4eb4089c28509ac81ca7ac385c32e265c/src/contrib/eclipse-plugin/resources/Elephant3-122x122.png -------------------------------------------------------------------------------- /src/contrib/eclipse-plugin/resources/Elephant3-16x16.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/winghc/hadoop2x-eclipse-plugin/a2e00ed4eb4089c28509ac81ca7ac385c32e265c/src/contrib/eclipse-plugin/resources/Elephant3-16x16.png -------------------------------------------------------------------------------- /src/contrib/eclipse-plugin/resources/Elephant3-24x24.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/winghc/hadoop2x-eclipse-plugin/a2e00ed4eb4089c28509ac81ca7ac385c32e265c/src/contrib/eclipse-plugin/resources/Elephant3-24x24.png -------------------------------------------------------------------------------- /src/contrib/eclipse-plugin/resources/HelloWorld.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 6 | IBM(R) MapReduce Tools for Eclipse enables you to write 7 | distributed applications based on the MapReduce paradigm 8 | using the Apache Hadoop runtime. This cheat sheet will walk 9 | you through the steps needed to write a MapReduce 10 | application and run it on a Hadoop server. 11 | 12 | 13 | 14 | 15 | 16 | 17 | This task takes you through the steps to setup the 18 | Hadoop environment with the MapReduce Tools. If you 19 | already have Hadoop installed and linked to Eclipse, you 20 | can skip this task. 21 | 22 | 23 | Congratulations! You have now installed Hadoop on your 24 | computer and linked it with the MapReduce Tools. 25 | 26 | 28 | 29 | Hadoop must be downloaded to a place where Eclipse 30 | can access its libraries. This task covers the steps 31 | needed to execute this task. 
32 | 33 | 34 | 35 | 36 | The plugin currently supports Hadoop v0.7.2 through 37 | 0.12.2. Now click on the top-most link that you feel 38 | comfortable installing. 39 | 40 | 41 | 43 | ... 44 | 45 | This tutorial informs you how to set the default 46 | Hadoop directory for the plugin. 47 | 48 | 49 | 50 | 51 | 52 | 54 | 55 | This section walks you through the steps to create and 56 | run your MapReduce project. 57 | 58 | 59 | 61 | 62 | This tutorial guides you through the creation of a 63 | simple MapReduce project with three MapReduce 64 | classes: a Mapper, a Reducer, and a Driver. 65 | 66 | 67 | 68 | 69 | Congratulations! You have now mastered the steps for 70 | creating a Hadoop project. 71 | 72 | 73 | 75 | 76 | 77 | Congratulations! You have now mastered the steps for 78 | implementing a Hadoop application. 79 | 80 | 81 | 82 | 83 | 84 | 86 | 87 | The MapReduce Tools for Eclipse plugin lets you 88 | browse and upload files to the DFS of a MapReduce cluster. 89 | 90 | 91 | Congratulations! You have completed the tutorials on using a 92 | MapReduce Cluster. 93 | 94 | 96 | 97 | This tutorial explains how to show files in the DFS of a 98 | MapReduce cluster. 99 | 100 | 101 | 102 | 103 | 105 | 106 | Simply double-click on any file in the DFS in the Project 107 | Explorer view. 108 | 109 | 110 | 112 | 113 | Right-click on an existing directory in the DFS.
114 | Choose the Import from local directory option. 115 |
116 | Note that files can only be uploaded to the HDFS at this time. 117 |
118 |
119 |
120 |
121 |
-------------------------------------------------------------------------------- /src/contrib/eclipse-plugin/resources/MAP100x100.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/winghc/hadoop2x-eclipse-plugin/a2e00ed4eb4089c28509ac81ca7ac385c32e265c/src/contrib/eclipse-plugin/resources/MAP100x100.gif -------------------------------------------------------------------------------- /src/contrib/eclipse-plugin/resources/MAP16x15.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/winghc/hadoop2x-eclipse-plugin/a2e00ed4eb4089c28509ac81ca7ac385c32e265c/src/contrib/eclipse-plugin/resources/MAP16x15.gif -------------------------------------------------------------------------------- /src/contrib/eclipse-plugin/resources/RunProj.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | This tutorial informs you how to run your newly created 6 | MapReduce Project in one of two fashions: locally as a Java 7 | Application, or on a Hadoop Server. 8 | 9 | 10 | 11 | 12 | To run your MapReduce application locally, right-click on 13 | your Driver class in the Package Explorer and select Run as 14 | / Java Application. 15 | 16 | 17 | 18 | 19 | To run your MapReduce application on a Hadoop server, right-click on 20 | your Driver class in the Package Explorer and select Run as 21 | / Run on Hadoop. 22 | 23 | 24 | 25 | -------------------------------------------------------------------------------- /src/contrib/eclipse-plugin/resources/SetHadoopPath.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | This tutorial informs you how to set the default Hadoop 7 | directory for the plugin. 8 | 9 | 10 | 11 | 12 | To set the default Hadoop directory, open the plugin 13 | preferences from the menu option 14 | Window > Preferences.
15 | Go to the Hadoop Home Directory 16 | preference, and enter the installation directory there. 17 | 18 | Use the following embedded command to open the Preferences 19 | window: 20 |
21 | 22 | 24 |
25 |
26 | -------------------------------------------------------------------------------- /src/contrib/eclipse-plugin/resources/Setup.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | This cheat sheet launches a browser to the Hadoop website. 5 | 6 | 7 | 8 | Go to http://hadoop.apache.org/core/, and follow 9 | links to download the latest stable distribution of 10 | Hadoop. 11 | 12 | 13 | Use the following embedded command to launch the Hadoop Web site 14 | in a browser 15 | 17 | 18 | 19 | -------------------------------------------------------------------------------- /src/contrib/eclipse-plugin/resources/download.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/winghc/hadoop2x-eclipse-plugin/a2e00ed4eb4089c28509ac81ca7ac385c32e265c/src/contrib/eclipse-plugin/resources/download.png -------------------------------------------------------------------------------- /src/contrib/eclipse-plugin/resources/drive100x100.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/winghc/hadoop2x-eclipse-plugin/a2e00ed4eb4089c28509ac81ca7ac385c32e265c/src/contrib/eclipse-plugin/resources/drive100x100.gif -------------------------------------------------------------------------------- /src/contrib/eclipse-plugin/resources/drive16x16.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/winghc/hadoop2x-eclipse-plugin/a2e00ed4eb4089c28509ac81ca7ac385c32e265c/src/contrib/eclipse-plugin/resources/drive16x16.gif -------------------------------------------------------------------------------- /src/contrib/eclipse-plugin/resources/driver.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/winghc/hadoop2x-eclipse-plugin/a2e00ed4eb4089c28509ac81ca7ac385c32e265c/src/contrib/eclipse-plugin/resources/driver.png -------------------------------------------------------------------------------- /src/contrib/eclipse-plugin/resources/driverwiz.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/winghc/hadoop2x-eclipse-plugin/a2e00ed4eb4089c28509ac81ca7ac385c32e265c/src/contrib/eclipse-plugin/resources/driverwiz.png -------------------------------------------------------------------------------- /src/contrib/eclipse-plugin/resources/elephantblue16x16.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/winghc/hadoop2x-eclipse-plugin/a2e00ed4eb4089c28509ac81ca7ac385c32e265c/src/contrib/eclipse-plugin/resources/elephantblue16x16.gif -------------------------------------------------------------------------------- /src/contrib/eclipse-plugin/resources/files.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/winghc/hadoop2x-eclipse-plugin/a2e00ed4eb4089c28509ac81ca7ac385c32e265c/src/contrib/eclipse-plugin/resources/files.gif -------------------------------------------------------------------------------- /src/contrib/eclipse-plugin/resources/hadoop-logo-16x16.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/winghc/hadoop2x-eclipse-plugin/a2e00ed4eb4089c28509ac81ca7ac385c32e265c/src/contrib/eclipse-plugin/resources/hadoop-logo-16x16.png -------------------------------------------------------------------------------- /src/contrib/eclipse-plugin/resources/hadoop-logo-24x24.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/winghc/hadoop2x-eclipse-plugin/a2e00ed4eb4089c28509ac81ca7ac385c32e265c/src/contrib/eclipse-plugin/resources/hadoop-logo-24x24.png -------------------------------------------------------------------------------- /src/contrib/eclipse-plugin/resources/hadoop-logo-85x85.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/winghc/hadoop2x-eclipse-plugin/a2e00ed4eb4089c28509ac81ca7ac385c32e265c/src/contrib/eclipse-plugin/resources/hadoop-logo-85x85.png -------------------------------------------------------------------------------- /src/contrib/eclipse-plugin/resources/hadoop-logo.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/winghc/hadoop2x-eclipse-plugin/a2e00ed4eb4089c28509ac81ca7ac385c32e265c/src/contrib/eclipse-plugin/resources/hadoop-logo.jpg -------------------------------------------------------------------------------- /src/contrib/eclipse-plugin/resources/hadoop.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/winghc/hadoop2x-eclipse-plugin/a2e00ed4eb4089c28509ac81ca7ac385c32e265c/src/contrib/eclipse-plugin/resources/hadoop.gif -------------------------------------------------------------------------------- /src/contrib/eclipse-plugin/resources/hadoop_small.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/winghc/hadoop2x-eclipse-plugin/a2e00ed4eb4089c28509ac81ca7ac385c32e265c/src/contrib/eclipse-plugin/resources/hadoop_small.gif -------------------------------------------------------------------------------- /src/contrib/eclipse-plugin/resources/job.gif: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/winghc/hadoop2x-eclipse-plugin/a2e00ed4eb4089c28509ac81ca7ac385c32e265c/src/contrib/eclipse-plugin/resources/job.gif -------------------------------------------------------------------------------- /src/contrib/eclipse-plugin/resources/location-edit-16x16.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/winghc/hadoop2x-eclipse-plugin/a2e00ed4eb4089c28509ac81ca7ac385c32e265c/src/contrib/eclipse-plugin/resources/location-edit-16x16.png -------------------------------------------------------------------------------- /src/contrib/eclipse-plugin/resources/location-new-16x16.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/winghc/hadoop2x-eclipse-plugin/a2e00ed4eb4089c28509ac81ca7ac385c32e265c/src/contrib/eclipse-plugin/resources/location-new-16x16.png -------------------------------------------------------------------------------- /src/contrib/eclipse-plugin/resources/map16x16.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/winghc/hadoop2x-eclipse-plugin/a2e00ed4eb4089c28509ac81ca7ac385c32e265c/src/contrib/eclipse-plugin/resources/map16x16.gif -------------------------------------------------------------------------------- /src/contrib/eclipse-plugin/resources/mapper16.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/winghc/hadoop2x-eclipse-plugin/a2e00ed4eb4089c28509ac81ca7ac385c32e265c/src/contrib/eclipse-plugin/resources/mapper16.png -------------------------------------------------------------------------------- /src/contrib/eclipse-plugin/resources/mapwiz.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/winghc/hadoop2x-eclipse-plugin/a2e00ed4eb4089c28509ac81ca7ac385c32e265c/src/contrib/eclipse-plugin/resources/mapwiz.png -------------------------------------------------------------------------------- /src/contrib/eclipse-plugin/resources/new-folder.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/winghc/hadoop2x-eclipse-plugin/a2e00ed4eb4089c28509ac81ca7ac385c32e265c/src/contrib/eclipse-plugin/resources/new-folder.png -------------------------------------------------------------------------------- /src/contrib/eclipse-plugin/resources/projwiz.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/winghc/hadoop2x-eclipse-plugin/a2e00ed4eb4089c28509ac81ca7ac385c32e265c/src/contrib/eclipse-plugin/resources/projwiz.png -------------------------------------------------------------------------------- /src/contrib/eclipse-plugin/resources/reduce100x100.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/winghc/hadoop2x-eclipse-plugin/a2e00ed4eb4089c28509ac81ca7ac385c32e265c/src/contrib/eclipse-plugin/resources/reduce100x100.gif -------------------------------------------------------------------------------- /src/contrib/eclipse-plugin/resources/reduce16x16.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/winghc/hadoop2x-eclipse-plugin/a2e00ed4eb4089c28509ac81ca7ac385c32e265c/src/contrib/eclipse-plugin/resources/reduce16x16.gif -------------------------------------------------------------------------------- /src/contrib/eclipse-plugin/resources/reducer-16x16.gif: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/winghc/hadoop2x-eclipse-plugin/a2e00ed4eb4089c28509ac81ca7ac385c32e265c/src/contrib/eclipse-plugin/resources/reducer-16x16.gif -------------------------------------------------------------------------------- /src/contrib/eclipse-plugin/resources/reducer16.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/winghc/hadoop2x-eclipse-plugin/a2e00ed4eb4089c28509ac81ca7ac385c32e265c/src/contrib/eclipse-plugin/resources/reducer16.png -------------------------------------------------------------------------------- /src/contrib/eclipse-plugin/resources/reducewiz.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/winghc/hadoop2x-eclipse-plugin/a2e00ed4eb4089c28509ac81ca7ac385c32e265c/src/contrib/eclipse-plugin/resources/reducewiz.png -------------------------------------------------------------------------------- /src/contrib/eclipse-plugin/resources/refresh.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/winghc/hadoop2x-eclipse-plugin/a2e00ed4eb4089c28509ac81ca7ac385c32e265c/src/contrib/eclipse-plugin/resources/refresh.png -------------------------------------------------------------------------------- /src/contrib/eclipse-plugin/resources/spite_overcloud.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/winghc/hadoop2x-eclipse-plugin/a2e00ed4eb4089c28509ac81ca7ac385c32e265c/src/contrib/eclipse-plugin/resources/spite_overcloud.png -------------------------------------------------------------------------------- /src/contrib/eclipse-plugin/resources/spitesmall.gif: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/winghc/hadoop2x-eclipse-plugin/a2e00ed4eb4089c28509ac81ca7ac385c32e265c/src/contrib/eclipse-plugin/resources/spitesmall.gif -------------------------------------------------------------------------------- /src/contrib/eclipse-plugin/resources/spitesmall.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/winghc/hadoop2x-eclipse-plugin/a2e00ed4eb4089c28509ac81ca7ac385c32e265c/src/contrib/eclipse-plugin/resources/spitesmall.png -------------------------------------------------------------------------------- /src/contrib/eclipse-plugin/resources/upload.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/winghc/hadoop2x-eclipse-plugin/a2e00ed4eb4089c28509ac81ca7ac385c32e265c/src/contrib/eclipse-plugin/resources/upload.png -------------------------------------------------------------------------------- /src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/Activator.java: -------------------------------------------------------------------------------- 1 | /** 2 | * Licensed to the Apache Software Foundation (ASF) under one 3 | * or more contributor license agreements. See the NOTICE file 4 | * distributed with this work for additional information 5 | * regarding copyright ownership. The ASF licenses this file 6 | * to you under the Apache License, Version 2.0 (the 7 | * "License"); you may not use this file except in compliance 8 | * with the License. You may obtain a copy of the License at 9 | * 10 | * http://www.apache.org/licenses/LICENSE-2.0 11 | * 12 | * Unless required by applicable law or agreed to in writing, software 13 | * distributed under the License is distributed on an "AS IS" BASIS, 14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 15 | * See the License for the specific language governing permissions and 16 | * limitations under the License. 
17 | */ 18 | 19 | package org.apache.hadoop.eclipse; 20 | 21 | import org.apache.hadoop.eclipse.servers.ServerRegistry; 22 | import org.eclipse.ui.plugin.AbstractUIPlugin; 23 | import org.osgi.framework.BundleContext; 24 | 25 | /** 26 | * The activator class controls the plug-in life cycle 27 | */ 28 | public class Activator extends AbstractUIPlugin { 29 | 30 | /** 31 | * The plug-in ID 32 | */ 33 | public static final String PLUGIN_ID = "org.apache.hadoop.eclipse"; 34 | 35 | /** 36 | * The shared unique instance (singleton) 37 | */ 38 | private static Activator plugin; 39 | 40 | /** 41 | * Constructor 42 | */ 43 | public Activator() { 44 | synchronized (Activator.class) { 45 | if (plugin != null) { 46 | // Not a singleton!? 47 | throw new RuntimeException("Activator for " + PLUGIN_ID 48 | + " is not a singleton"); 49 | } 50 | plugin = this; 51 | } 52 | } 53 | 54 | /* @inheritDoc */ 55 | @Override 56 | public void start(BundleContext context) throws Exception { 57 | super.start(context); 58 | } 59 | 60 | /* @inheritDoc */ 61 | @Override 62 | public void stop(BundleContext context) throws Exception { 63 | ServerRegistry.getInstance().dispose(); 64 | plugin = null; 65 | super.stop(context); 66 | } 67 | 68 | /** 69 | * Returns the shared unique instance (singleton) 70 | * 71 | * @return the shared unique instance (singleton) 72 | */ 73 | public static Activator getDefault() { 74 | return plugin; 75 | } 76 | 77 | } 78 | -------------------------------------------------------------------------------- /src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/ErrorMessageDialog.java: -------------------------------------------------------------------------------- 1 | /** 2 | * Licensed to the Apache Software Foundation (ASF) under one 3 | * or more contributor license agreements. See the NOTICE file 4 | * distributed with this work for additional information 5 | * regarding copyright ownership. 
The ASF licenses this file 6 | * to you under the Apache License, Version 2.0 (the 7 | * "License"); you may not use this file except in compliance 8 | * with the License. You may obtain a copy of the License at 9 | * 10 | * http://www.apache.org/licenses/LICENSE-2.0 11 | * 12 | * Unless required by applicable law or agreed to in writing, software 13 | * distributed under the License is distributed on an "AS IS" BASIS, 14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 15 | * See the License for the specific language governing permissions and 16 | * limitations under the License. 17 | */ 18 | 19 | package org.apache.hadoop.eclipse; 20 | 21 | import org.eclipse.jface.dialogs.MessageDialog; 22 | import org.eclipse.swt.widgets.Display; 23 | 24 | /** 25 | * Error dialog helper 26 | */ 27 | public class ErrorMessageDialog { 28 | 29 | public static void display(final String title, final String message) { 30 | Display.getDefault().syncExec(new Runnable() { 31 | 32 | public void run() { 33 | MessageDialog.openError(Display.getDefault().getActiveShell(), 34 | title, message); 35 | } 36 | 37 | }); 38 | } 39 | 40 | public static void display(Exception e) { 41 | display("An exception has occured!", "Exception description:\n" 42 | + e.getLocalizedMessage()); 43 | } 44 | 45 | } 46 | -------------------------------------------------------------------------------- /src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/HadoopPerspectiveFactory.java: -------------------------------------------------------------------------------- 1 | /** 2 | * Licensed to the Apache Software Foundation (ASF) under one 3 | * or more contributor license agreements. See the NOTICE file 4 | * distributed with this work for additional information 5 | * regarding copyright ownership. The ASF licenses this file 6 | * to you under the Apache License, Version 2.0 (the 7 | * "License"); you may not use this file except in compliance 8 | * with the License. 
You may obtain a copy of the License at 9 | * 10 | * http://www.apache.org/licenses/LICENSE-2.0 11 | * 12 | * Unless required by applicable law or agreed to in writing, software 13 | * distributed under the License is distributed on an "AS IS" BASIS, 14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 15 | * See the License for the specific language governing permissions and 16 | * limitations under the License. 17 | */ 18 | 19 | package org.apache.hadoop.eclipse; 20 | 21 | import org.eclipse.debug.ui.IDebugUIConstants; 22 | import org.eclipse.jdt.ui.JavaUI; 23 | import org.eclipse.ui.IFolderLayout; 24 | import org.eclipse.ui.IPageLayout; 25 | import org.eclipse.ui.IPerspectiveFactory; 26 | import org.eclipse.ui.console.IConsoleConstants; 27 | 28 | /** 29 | * Creates links to the new MapReduce-based wizards and views for a MapReduce 30 | * perspective 31 | * 32 | */ 33 | 34 | public class HadoopPerspectiveFactory implements IPerspectiveFactory { 35 | 36 | public void createInitialLayout(IPageLayout layout) { 37 | layout.addNewWizardShortcut("org.apache.hadoop.eclipse.NewDriverWizard"); 38 | layout.addNewWizardShortcut("org.apache.hadoop.eclipse.NewMapperWizard"); 39 | layout 40 | .addNewWizardShortcut("org.apache.hadoop.eclipse.NewReducerWizard"); 41 | 42 | IFolderLayout left = 43 | layout.createFolder("org.apache.hadoop.eclipse.perspective.left", 44 | IPageLayout.LEFT, 0.2f, layout.getEditorArea()); 45 | left.addView("org.eclipse.ui.navigator.ProjectExplorer"); 46 | 47 | IFolderLayout bottom = 48 | layout.createFolder("org.apache.hadoop.eclipse.perspective.bottom", 49 | IPageLayout.BOTTOM, 0.7f, layout.getEditorArea()); 50 | bottom.addView(IPageLayout.ID_PROBLEM_VIEW); 51 | bottom.addView(IPageLayout.ID_TASK_LIST); 52 | bottom.addView(JavaUI.ID_JAVADOC_VIEW); 53 | bottom.addView("org.apache.hadoop.eclipse.view.servers"); 54 | bottom.addPlaceholder(JavaUI.ID_SOURCE_VIEW); 55 | bottom.addPlaceholder(IPageLayout.ID_PROGRESS_VIEW); 56 | 
bottom.addPlaceholder(IConsoleConstants.ID_CONSOLE_VIEW); 57 | bottom.addPlaceholder(IPageLayout.ID_BOOKMARKS); 58 | 59 | IFolderLayout right = 60 | layout.createFolder("org.apache.hadoop.eclipse.perspective.right", 61 | IPageLayout.RIGHT, 0.8f, layout.getEditorArea()); 62 | right.addView(IPageLayout.ID_OUTLINE); 63 | right.addView("org.eclipse.ui.cheatsheets.views.CheatSheetView"); 64 | // right.addView(layout.ID); .. cheat sheet here 65 | 66 | layout.addActionSet(IDebugUIConstants.LAUNCH_ACTION_SET); 67 | layout.addActionSet(JavaUI.ID_ACTION_SET); 68 | layout.addActionSet(JavaUI.ID_CODING_ACTION_SET); 69 | layout.addActionSet(JavaUI.ID_ELEMENT_CREATION_ACTION_SET); 70 | layout.addActionSet(IPageLayout.ID_NAVIGATE_ACTION_SET); 71 | layout.addActionSet(JavaUI.ID_SEARCH_ACTION_SET); 72 | 73 | layout 74 | .addNewWizardShortcut("org.eclipse.jdt.ui.wizards.NewPackageCreationWizard"); 75 | layout 76 | .addNewWizardShortcut("org.eclipse.jdt.ui.wizards.NewClassCreationWizard"); 77 | layout 78 | .addNewWizardShortcut("org.eclipse.jdt.ui.wizards.NewInterfaceCreationWizard"); 79 | layout 80 | .addNewWizardShortcut("org.eclipse.jdt.ui.wizards.NewEnumCreationWizard"); 81 | layout 82 | .addNewWizardShortcut("org.eclipse.jdt.ui.wizards.NewAnnotationCreationWizard"); 83 | layout 84 | .addNewWizardShortcut("org.eclipse.jdt.ui.wizards.NewSourceFolderCreationWizard"); 85 | layout 86 | .addNewWizardShortcut("org.eclipse.jdt.ui.wizards.NewSnippetFileCreationWizard"); 87 | layout.addNewWizardShortcut("org.eclipse.ui.wizards.new.folder"); 88 | layout.addNewWizardShortcut("org.eclipse.ui.wizards.new.file"); 89 | layout 90 | .addNewWizardShortcut("org.eclipse.ui.editors.wizards.UntitledTextFileWizard"); 91 | 92 | // CheatSheetViewerFactory.createCheatSheetView().setInput("org.apache.hadoop.eclipse.cheatsheet"); 93 | } 94 | 95 | } 96 | -------------------------------------------------------------------------------- 
/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/MapReduceNature.java: -------------------------------------------------------------------------------- 1 | /** 2 | * Licensed to the Apache Software Foundation (ASF) under one 3 | * or more contributor license agreements. See the NOTICE file 4 | * distributed with this work for additional information 5 | * regarding copyright ownership. The ASF licenses this file 6 | * to you under the Apache License, Version 2.0 (the 7 | * "License"); you may not use this file except in compliance 8 | * with the License. You may obtain a copy of the License at 9 | * 10 | * http://www.apache.org/licenses/LICENSE-2.0 11 | * 12 | * Unless required by applicable law or agreed to in writing, software 13 | * distributed under the License is distributed on an "AS IS" BASIS, 14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 15 | * See the License for the specific language governing permissions and 16 | * limitations under the License. 17 | */ 18 | 19 | package org.apache.hadoop.eclipse; 20 | 21 | import java.io.File; 22 | import java.io.FileFilter; 23 | import java.net.URL; 24 | import java.util.ArrayList; 25 | import java.util.Iterator; 26 | import java.util.logging.Level; 27 | import java.util.logging.Logger; 28 | 29 | import org.eclipse.core.resources.IProject; 30 | import org.eclipse.core.resources.IProjectNature; 31 | import org.eclipse.core.runtime.CoreException; 32 | import org.eclipse.core.runtime.NullProgressMonitor; 33 | import org.eclipse.core.runtime.Path; 34 | import org.eclipse.core.runtime.QualifiedName; 35 | import org.eclipse.jdt.core.IClasspathEntry; 36 | import org.eclipse.jdt.core.IJavaProject; 37 | import org.eclipse.jdt.core.JavaCore; 38 | 39 | /** 40 | * Class to configure and deconfigure an Eclipse project with the MapReduce 41 | * project nature. 
42 | */ 43 | 44 | public class MapReduceNature implements IProjectNature { 45 | 46 | public static final String ID = "org.apache.hadoop.eclipse.Nature"; 47 | 48 | private IProject project; 49 | 50 | static Logger log = Logger.getLogger(MapReduceNature.class.getName()); 51 | 52 | /** 53 | * Configures an Eclipse project as a Map/Reduce project by adding the 54 | * Hadoop libraries to a project's classpath. 55 | */ 56 | public void configure() throws CoreException { 57 | String path = 58 | project.getPersistentProperty(new QualifiedName(Activator.PLUGIN_ID, 59 | "hadoop.runtime.path")); 60 | 61 | path += File.separatorChar + "share" + File.separatorChar +"hadoop"; 62 | String[] dirs={"common","mapreduce","hdfs","yarn"}; 63 | final ArrayList coreJars = new ArrayList(); 64 | for(String sub:dirs){ 65 | File dir = new File(path + File.separatorChar + sub); 66 | dir.listFiles(new FileFilter() { 67 | public boolean accept(File pathname) { 68 | String fileName = pathname.getName(); 69 | 70 | // get the hadoop core jar without touching test or examples 71 | // older version of hadoop don't use the word "core" -- eyhung 72 | if ((fileName.indexOf("hadoop") != -1) && (fileName.endsWith("jar")) 73 | && (fileName.indexOf("test") == -1) 74 | && (fileName.indexOf("examples") == -1)) { 75 | coreJars.add(pathname); 76 | } 77 | 78 | return false; // we don't care what this returns 79 | } 80 | }); 81 | File dir2 = new File(path + File.separatorChar + sub + File.separatorChar + "lib"); 82 | if (dir2.exists() && dir2.isDirectory()) { 83 | dir2.listFiles(new FileFilter() { 84 | public boolean accept(File pathname) { 85 | if ((!pathname.isDirectory()) 86 | && (pathname.getName().endsWith("jar"))) { 87 | coreJars.add(pathname); 88 | } 89 | 90 | return false; // we don't care what this returns 91 | } 92 | }); 93 | } 94 | } 95 | // Add Hadoop libraries onto classpath 96 | IJavaProject javaProject = JavaCore.create(getProject()); 97 | // Bundle bundle = Activator.getDefault().getBundle(); 98 
| try { 99 | IClasspathEntry[] currentCp = javaProject.getRawClasspath(); 100 | IClasspathEntry[] newCp = 101 | new IClasspathEntry[currentCp.length + coreJars.size()]; 102 | System.arraycopy(currentCp, 0, newCp, 0, currentCp.length); 103 | 104 | final Iterator i = coreJars.iterator(); 105 | int count = 0; 106 | while (i.hasNext()) { 107 | // for (int i = 0; i < s_coreJarNames.length; i++) { 108 | 109 | final File f = (File) i.next(); 110 | // URL url = FileLocator.toFileURL(FileLocator.find(bundle, new 111 | // Path("lib/" + s_coreJarNames[i]), null)); 112 | URL url = f.toURI().toURL(); 113 | log.finer("hadoop library url.getPath() = " + url.getPath()); 114 | 115 | newCp[newCp.length - 1 - count] = 116 | JavaCore.newLibraryEntry(new Path(url.getPath()), null, null); 117 | count++; 118 | } 119 | 120 | javaProject.setRawClasspath(newCp, new NullProgressMonitor()); 121 | } catch (Exception e) { 122 | log.log(Level.SEVERE, "IOException generated in " 123 | + this.getClass().getCanonicalName(), e); 124 | } 125 | } 126 | 127 | /** 128 | * Deconfigure a project from MapReduce status. Currently unimplemented. 129 | */ 130 | public void deconfigure() throws CoreException { 131 | // TODO Auto-generated method stub 132 | } 133 | 134 | /** 135 | * Returns the project to which this project nature applies. 136 | */ 137 | public IProject getProject() { 138 | return this.project; 139 | } 140 | 141 | /** 142 | * Sets the project to which this nature applies. Used when instantiating 143 | * this project nature runtime. 
144 | */ 145 | public void setProject(IProject project) { 146 | this.project = project; 147 | } 148 | 149 | } 150 | -------------------------------------------------------------------------------- /src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/NewDriverWizard.java: -------------------------------------------------------------------------------- 1 | /** 2 | * Licensed to the Apache Software Foundation (ASF) under one 3 | * or more contributor license agreements. See the NOTICE file 4 | * distributed with this work for additional information 5 | * regarding copyright ownership. The ASF licenses this file 6 | * to you under the Apache License, Version 2.0 (the 7 | * "License"); you may not use this file except in compliance 8 | * with the License. You may obtain a copy of the License at 9 | * 10 | * http://www.apache.org/licenses/LICENSE-2.0 11 | * 12 | * Unless required by applicable law or agreed to in writing, software 13 | * distributed under the License is distributed on an "AS IS" BASIS, 14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 15 | * See the License for the specific language governing permissions and 16 | * limitations under the License. 17 | */ 18 | 19 | package org.apache.hadoop.eclipse; 20 | 21 | import org.eclipse.core.resources.IFile; 22 | import org.eclipse.core.runtime.CoreException; 23 | import org.eclipse.core.runtime.IProgressMonitor; 24 | import org.eclipse.jdt.core.IJavaElement; 25 | import org.eclipse.jdt.internal.ui.wizards.NewElementWizard; 26 | import org.eclipse.jface.operation.IRunnableWithProgress; 27 | import org.eclipse.jface.viewers.IStructuredSelection; 28 | import org.eclipse.ui.INewWizard; 29 | import org.eclipse.ui.IWorkbench; 30 | 31 | /** 32 | * Wizard for creating a new Driver class (a class that runs a MapReduce job). 
33 | * 34 | */ 35 | 36 | public class NewDriverWizard extends NewElementWizard implements INewWizard, 37 | IRunnableWithProgress { 38 | private NewDriverWizardPage page; 39 | 40 | /* 41 | * @Override public boolean performFinish() { } 42 | */ 43 | public void run(IProgressMonitor monitor) { 44 | try { 45 | page.createType(monitor); 46 | } catch (CoreException e) { 47 | // TODO Auto-generated catch block 48 | e.printStackTrace(); 49 | } catch (InterruptedException e) { 50 | // TODO Auto-generated catch block 51 | e.printStackTrace(); 52 | } 53 | } 54 | 55 | public NewDriverWizard() { 56 | setWindowTitle("New MapReduce Driver"); 57 | } 58 | 59 | @Override 60 | public void init(IWorkbench workbench, IStructuredSelection selection) { 61 | super.init(workbench, selection); 62 | 63 | page = new NewDriverWizardPage(); 64 | addPage(page); 65 | page.setSelection(selection); 66 | } 67 | 68 | @Override 69 | /** 70 | * Performs any actions appropriate in response to the user having pressed the 71 | * Finish button, or refuse if finishing now is not permitted. 
72 | */ 73 | public boolean performFinish() { 74 | if (super.performFinish()) { 75 | if (getCreatedElement() != null) { 76 | selectAndReveal(page.getModifiedResource()); 77 | openResource((IFile) page.getModifiedResource()); 78 | } 79 | 80 | return true; 81 | } else { 82 | return false; 83 | } 84 | } 85 | 86 | @Override 87 | /** 88 | * 89 | */ 90 | protected void finishPage(IProgressMonitor monitor) 91 | throws InterruptedException, CoreException { 92 | this.run(monitor); 93 | } 94 | 95 | @Override 96 | public IJavaElement getCreatedElement() { 97 | return page.getCreatedType().getPrimaryElement(); 98 | } 99 | } 100 | -------------------------------------------------------------------------------- /src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/NewMapperWizard.java: -------------------------------------------------------------------------------- 1 | /** 2 | * Licensed to the Apache Software Foundation (ASF) under one 3 | * or more contributor license agreements. See the NOTICE file 4 | * distributed with this work for additional information 5 | * regarding copyright ownership. The ASF licenses this file 6 | * to you under the Apache License, Version 2.0 (the 7 | * "License"); you may not use this file except in compliance 8 | * with the License. You may obtain a copy of the License at 9 | * 10 | * http://www.apache.org/licenses/LICENSE-2.0 11 | * 12 | * Unless required by applicable law or agreed to in writing, software 13 | * distributed under the License is distributed on an "AS IS" BASIS, 14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 15 | * See the License for the specific language governing permissions and 16 | * limitations under the License. 
17 | */ 18 | 19 | package org.apache.hadoop.eclipse; 20 | 21 | import java.io.IOException; 22 | import java.util.Arrays; 23 | 24 | import org.eclipse.core.resources.IFile; 25 | import org.eclipse.core.runtime.CoreException; 26 | import org.eclipse.core.runtime.FileLocator; 27 | import org.eclipse.core.runtime.IProgressMonitor; 28 | import org.eclipse.core.runtime.IStatus; 29 | import org.eclipse.core.runtime.Path; 30 | import org.eclipse.jdt.core.IJavaElement; 31 | import org.eclipse.jdt.core.IType; 32 | import org.eclipse.jdt.internal.ui.wizards.NewElementWizard; 33 | import org.eclipse.jdt.ui.wizards.NewTypeWizardPage; 34 | import org.eclipse.jface.operation.IRunnableWithProgress; 35 | import org.eclipse.jface.resource.ImageDescriptor; 36 | import org.eclipse.jface.viewers.IStructuredSelection; 37 | import org.eclipse.swt.SWT; 38 | import org.eclipse.swt.layout.GridLayout; 39 | import org.eclipse.swt.widgets.Button; 40 | import org.eclipse.swt.widgets.Composite; 41 | import org.eclipse.ui.INewWizard; 42 | import org.eclipse.ui.IWorkbench; 43 | 44 | /** 45 | * Wizard for creating a new Mapper class (a class that runs the Map portion 46 | * of a MapReduce job). The class is pre-filled with a template. 
47 | * 48 | */ 49 | 50 | public class NewMapperWizard extends NewElementWizard implements INewWizard, 51 | IRunnableWithProgress { 52 | private Page page; 53 | 54 | public NewMapperWizard() { 55 | setWindowTitle("New Mapper"); 56 | } 57 | 58 | public void run(IProgressMonitor monitor) { 59 | try { 60 | page.createType(monitor); 61 | } catch (CoreException e) { 62 | // TODO Auto-generated catch block 63 | e.printStackTrace(); 64 | } catch (InterruptedException e) { 65 | // TODO Auto-generated catch block 66 | e.printStackTrace(); 67 | } 68 | } 69 | 70 | @Override 71 | public void init(IWorkbench workbench, IStructuredSelection selection) { 72 | super.init(workbench, selection); 73 | 74 | page = new Page(); 75 | addPage(page); 76 | page.setSelection(selection); 77 | } 78 | 79 | public static class Page extends NewTypeWizardPage { 80 | private Button isCreateMapMethod; 81 | 82 | public Page() { 83 | super(true, "Mapper"); 84 | 85 | setTitle("Mapper"); 86 | setDescription("Create a new Mapper implementation."); 87 | setImageDescriptor(ImageLibrary.get("wizard.mapper.new")); 88 | } 89 | 90 | public void setSelection(IStructuredSelection selection) { 91 | initContainerPage(getInitialJavaElement(selection)); 92 | initTypePage(getInitialJavaElement(selection)); 93 | } 94 | 95 | @Override 96 | public void createType(IProgressMonitor monitor) throws CoreException, 97 | InterruptedException { 98 | super.createType(monitor); 99 | } 100 | 101 | @Override 102 | protected void createTypeMembers(IType newType, ImportsManager imports, 103 | IProgressMonitor monitor) throws CoreException { 104 | super.createTypeMembers(newType, imports, monitor); 105 | imports.addImport("java.io.IOException"); 106 | imports.addImport("org.apache.hadoop.io.LongWritable"); 107 | imports.addImport("org.apache.hadoop.io.Text"); 108 | newType 109 | .createMethod( 110 | "public void map(LongWritable ikey, Text ivalue, Context context) throws IOException,InterruptedException {\n\n}\n", 111 | null, false, 
monitor); 112 | } 113 | 114 | public void createControl(Composite parent) { 115 | // super.createControl(parent); 116 | 117 | initializeDialogUnits(parent); 118 | Composite composite = new Composite(parent, SWT.NONE); 119 | GridLayout layout = new GridLayout(); 120 | layout.numColumns = 4; 121 | composite.setLayout(layout); 122 | 123 | createContainerControls(composite, 4); 124 | createPackageControls(composite, 4); 125 | createSeparator(composite, 4); 126 | createTypeNameControls(composite, 4); 127 | createSuperClassControls(composite, 4); 128 | createSuperInterfacesControls(composite, 4); 129 | // createSeparator(composite, 4); 130 | 131 | setControl(composite); 132 | 133 | setSuperClass("org.apache.hadoop.mapreduce.Mapper", true); 134 | 135 | setFocus(); 136 | validate(); 137 | } 138 | 139 | @Override 140 | protected void handleFieldChanged(String fieldName) { 141 | super.handleFieldChanged(fieldName); 142 | 143 | validate(); 144 | } 145 | 146 | private void validate() { 147 | updateStatus(new IStatus[] { fContainerStatus, fPackageStatus, 148 | fTypeNameStatus, fSuperClassStatus, fSuperInterfacesStatus }); 149 | } 150 | } 151 | 152 | @Override 153 | public boolean performFinish() { 154 | if (super.performFinish()) { 155 | if (getCreatedElement() != null) { 156 | openResource((IFile) page.getModifiedResource()); 157 | selectAndReveal(page.getModifiedResource()); 158 | } 159 | 160 | return true; 161 | } else { 162 | return false; 163 | } 164 | } 165 | 166 | @Override 167 | protected void finishPage(IProgressMonitor monitor) 168 | throws InterruptedException, CoreException { 169 | this.run(monitor); 170 | } 171 | 172 | @Override 173 | public IJavaElement getCreatedElement() { 174 | return page.getCreatedType().getPrimaryElement(); 175 | } 176 | 177 | } 178 | -------------------------------------------------------------------------------- /src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/NewReducerWizard.java: 
-------------------------------------------------------------------------------- 1 | /** 2 | * Licensed to the Apache Software Foundation (ASF) under one 3 | * or more contributor license agreements. See the NOTICE file 4 | * distributed with this work for additional information 5 | * regarding copyright ownership. The ASF licenses this file 6 | * to you under the Apache License, Version 2.0 (the 7 | * "License"); you may not use this file except in compliance 8 | * with the License. You may obtain a copy of the License at 9 | * 10 | * http://www.apache.org/licenses/LICENSE-2.0 11 | * 12 | * Unless required by applicable law or agreed to in writing, software 13 | * distributed under the License is distributed on an "AS IS" BASIS, 14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 15 | * See the License for the specific language governing permissions and 16 | * limitations under the License. 17 | */ 18 | 19 | package org.apache.hadoop.eclipse; 20 | 21 | import java.io.IOException; 22 | import java.util.Arrays; 23 | 24 | import org.eclipse.core.resources.IFile; 25 | import org.eclipse.core.runtime.CoreException; 26 | import org.eclipse.core.runtime.FileLocator; 27 | import org.eclipse.core.runtime.IProgressMonitor; 28 | import org.eclipse.core.runtime.IStatus; 29 | import org.eclipse.core.runtime.Path; 30 | import org.eclipse.jdt.core.IJavaElement; 31 | import org.eclipse.jdt.core.IType; 32 | import org.eclipse.jdt.internal.ui.wizards.NewElementWizard; 33 | import org.eclipse.jdt.ui.wizards.NewTypeWizardPage; 34 | import org.eclipse.jface.operation.IRunnableWithProgress; 35 | import org.eclipse.jface.resource.ImageDescriptor; 36 | import org.eclipse.jface.viewers.IStructuredSelection; 37 | import org.eclipse.swt.SWT; 38 | import org.eclipse.swt.layout.GridLayout; 39 | import org.eclipse.swt.widgets.Composite; 40 | import org.eclipse.ui.INewWizard; 41 | import org.eclipse.ui.IWorkbench; 42 | 43 | /** 44 | * Wizard for creating a new 
Reducer class (a class that runs the Reduce 45 | * portion of a MapReduce job). The class is pre-filled with a template. 46 | * 47 | */ 48 | 49 | public class NewReducerWizard extends NewElementWizard implements 50 | INewWizard, IRunnableWithProgress { 51 | private Page page; 52 | 53 | public NewReducerWizard() { 54 | setWindowTitle("New Reducer"); 55 | } 56 | 57 | public void run(IProgressMonitor monitor) { 58 | try { 59 | page.createType(monitor); 60 | } catch (CoreException e) { 61 | // TODO Auto-generated catch block 62 | e.printStackTrace(); 63 | } catch (InterruptedException e) { 64 | // TODO Auto-generated catch block 65 | e.printStackTrace(); 66 | } 67 | } 68 | 69 | @Override 70 | public void init(IWorkbench workbench, IStructuredSelection selection) { 71 | super.init(workbench, selection); 72 | 73 | page = new Page(); 74 | addPage(page); 75 | page.setSelection(selection); 76 | } 77 | 78 | public static class Page extends NewTypeWizardPage { 79 | public Page() { 80 | super(true, "Reducer"); 81 | 82 | setTitle("Reducer"); 83 | setDescription("Create a new Reducer implementation."); 84 | setImageDescriptor(ImageLibrary.get("wizard.reducer.new")); 85 | } 86 | 87 | public void setSelection(IStructuredSelection selection) { 88 | initContainerPage(getInitialJavaElement(selection)); 89 | initTypePage(getInitialJavaElement(selection)); 90 | } 91 | 92 | @Override 93 | public void createType(IProgressMonitor monitor) throws CoreException, 94 | InterruptedException { 95 | super.createType(monitor); 96 | } 97 | 98 | @Override 99 | protected void createTypeMembers(IType newType, ImportsManager imports, 100 | IProgressMonitor monitor) throws CoreException { 101 | super.createTypeMembers(newType, imports, monitor); 102 | imports.addImport("java.io.IOException"); 103 | imports.addImport("org.apache.hadoop.io.Text"); 104 | newType 105 | .createMethod( 106 | "public void reduce(Text _key, Iterable values, Context context) throws IOException,InterruptedException \n{\n" 107 | 
+ "\t// process values\n" 108 | + "\tfor(Text val:values){\n\n" 109 | + "\t}\n" + "}\n", null, false, 110 | monitor); 111 | } 112 | 113 | public void createControl(Composite parent) { 114 | // super.createControl(parent); 115 | 116 | initializeDialogUnits(parent); 117 | Composite composite = new Composite(parent, SWT.NONE); 118 | GridLayout layout = new GridLayout(); 119 | layout.numColumns = 4; 120 | composite.setLayout(layout); 121 | 122 | createContainerControls(composite, 4); 123 | createPackageControls(composite, 4); 124 | createSeparator(composite, 4); 125 | createTypeNameControls(composite, 4); 126 | createSuperClassControls(composite, 4); 127 | createSuperInterfacesControls(composite, 4); 128 | // createSeparator(composite, 4); 129 | 130 | setControl(composite); 131 | 132 | setSuperClass("org.apache.hadoop.mapreduce.Reducer", true); 133 | 134 | setFocus(); 135 | validate(); 136 | } 137 | 138 | @Override 139 | protected void handleFieldChanged(String fieldName) { 140 | super.handleFieldChanged(fieldName); 141 | 142 | validate(); 143 | } 144 | 145 | private void validate() { 146 | updateStatus(new IStatus[] { fContainerStatus, fPackageStatus, 147 | fTypeNameStatus, fSuperClassStatus, fSuperInterfacesStatus }); 148 | } 149 | } 150 | 151 | @Override 152 | public boolean performFinish() { 153 | if (super.performFinish()) { 154 | if (getCreatedElement() != null) { 155 | selectAndReveal(page.getModifiedResource()); 156 | openResource((IFile) page.getModifiedResource()); 157 | } 158 | 159 | return true; 160 | } else { 161 | return false; 162 | } 163 | } 164 | 165 | @Override 166 | protected void finishPage(IProgressMonitor monitor) 167 | throws InterruptedException, CoreException { 168 | this.run(monitor); 169 | } 170 | 171 | @Override 172 | public IJavaElement getCreatedElement() { 173 | return (page.getCreatedType() == null) ? 
null : page.getCreatedType() 174 | .getPrimaryElement(); 175 | } 176 | } 177 | -------------------------------------------------------------------------------- /src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/PropertyTester.java: -------------------------------------------------------------------------------- 1 | /** 2 | * Licensed to the Apache Software Foundation (ASF) under one 3 | * or more contributor license agreements. See the NOTICE file 4 | * distributed with this work for additional information 5 | * regarding copyright ownership. The ASF licenses this file 6 | * to you under the Apache License, Version 2.0 (the 7 | * "License"); you may not use this file except in compliance 8 | * with the License. You may obtain a copy of the License at 9 | * 10 | * http://www.apache.org/licenses/LICENSE-2.0 11 | * 12 | * Unless required by applicable law or agreed to in writing, software 13 | * distributed under the License is distributed on an "AS IS" BASIS, 14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 15 | * See the License for the specific language governing permissions and 16 | * limitations under the License. 17 | */ 18 | 19 | package org.apache.hadoop.eclipse; 20 | 21 | import java.util.logging.Logger; 22 | 23 | /** 24 | * Class to help with debugging properties 25 | */ 26 | public class PropertyTester extends 27 | org.eclipse.core.expressions.PropertyTester { 28 | 29 | static Logger log = Logger.getLogger(PropertyTester.class.getName()); 30 | 31 | public PropertyTester() { 32 | } 33 | 34 | public boolean test(Object receiver, String property, Object[] args, 35 | Object expectedValue) { 36 | log.fine("Test property " + property + ", " + receiver.getClass()); 37 | 38 | return true; 39 | 40 | // todo(jz) support test for deployable if module has hadoop nature etc. 
41 | } 42 | 43 | } 44 | -------------------------------------------------------------------------------- /src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/actions/EditLocationAction.java: -------------------------------------------------------------------------------- 1 | /** 2 | * Licensed to the Apache Software Foundation (ASF) under one 3 | * or more contributor license agreements. See the NOTICE file 4 | * distributed with this work for additional information 5 | * regarding copyright ownership. The ASF licenses this file 6 | * to you under the Apache License, Version 2.0 (the 7 | * "License"); you may not use this file except in compliance 8 | * with the License. You may obtain a copy of the License at 9 | * 10 | * http://www.apache.org/licenses/LICENSE-2.0 11 | * 12 | * Unless required by applicable law or agreed to in writing, software 13 | * distributed under the License is distributed on an "AS IS" BASIS, 14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 15 | * See the License for the specific language governing permissions and 16 | * limitations under the License. 
17 | */ 18 | 19 | package org.apache.hadoop.eclipse.actions; 20 | 21 | import org.apache.hadoop.eclipse.ImageLibrary; 22 | import org.apache.hadoop.eclipse.server.HadoopServer; 23 | import org.apache.hadoop.eclipse.servers.HadoopLocationWizard; 24 | import org.apache.hadoop.eclipse.view.servers.ServerView; 25 | import org.eclipse.jface.action.Action; 26 | import org.eclipse.jface.wizard.Wizard; 27 | import org.eclipse.jface.wizard.WizardDialog; 28 | 29 | /** 30 | * Editing server properties action 31 | */ 32 | public class EditLocationAction extends Action { 33 | 34 | private ServerView serverView; 35 | 36 | public EditLocationAction(ServerView serverView) { 37 | this.serverView = serverView; 38 | 39 | setText("Edit Hadoop location..."); 40 | setImageDescriptor(ImageLibrary.get("server.view.action.location.edit")); 41 | } 42 | 43 | @Override 44 | public void run() { 45 | 46 | final HadoopServer server = serverView.getSelectedServer(); 47 | if (server == null) 48 | return; 49 | 50 | WizardDialog dialog = new WizardDialog(null, new Wizard() { 51 | private HadoopLocationWizard page = new HadoopLocationWizard(server); 52 | 53 | @Override 54 | public void addPages() { 55 | super.addPages(); 56 | setWindowTitle("Edit Hadoop location..."); 57 | addPage(page); 58 | } 59 | 60 | @Override 61 | public boolean performFinish() { 62 | page.performFinish(); 63 | return true; 64 | } 65 | }); 66 | 67 | dialog.create(); 68 | dialog.setBlockOnOpen(true); 69 | dialog.open(); 70 | 71 | super.run(); 72 | } 73 | } 74 | -------------------------------------------------------------------------------- /src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/actions/NewLocationAction.java: -------------------------------------------------------------------------------- 1 | /** 2 | * Licensed to the Apache Software Foundation (ASF) under one 3 | * or more contributor license agreements. 
See the NOTICE file 4 | * distributed with this work for additional information 5 | * regarding copyright ownership. The ASF licenses this file 6 | * to you under the Apache License, Version 2.0 (the 7 | * "License"); you may not use this file except in compliance 8 | * with the License. You may obtain a copy of the License at 9 | * 10 | * http://www.apache.org/licenses/LICENSE-2.0 11 | * 12 | * Unless required by applicable law or agreed to in writing, software 13 | * distributed under the License is distributed on an "AS IS" BASIS, 14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 15 | * See the License for the specific language governing permissions and 16 | * limitations under the License. 17 | */ 18 | 19 | package org.apache.hadoop.eclipse.actions; 20 | 21 | import org.apache.hadoop.eclipse.ImageLibrary; 22 | import org.apache.hadoop.eclipse.servers.HadoopLocationWizard; 23 | import org.eclipse.jface.action.Action; 24 | import org.eclipse.jface.wizard.Wizard; 25 | import org.eclipse.jface.wizard.WizardDialog; 26 | 27 | 28 | /** 29 | * Action corresponding to creating a new MapReduce Server. 
30 | */ 31 | 32 | public class NewLocationAction extends Action { 33 | public NewLocationAction() { 34 | setText("New Hadoop location..."); 35 | setImageDescriptor(ImageLibrary.get("server.view.action.location.new")); 36 | } 37 | 38 | @Override 39 | public void run() { 40 | WizardDialog dialog = new WizardDialog(null, new Wizard() { 41 | private HadoopLocationWizard page = new HadoopLocationWizard(); 42 | 43 | @Override 44 | public void addPages() { 45 | super.addPages(); 46 | setWindowTitle("New Hadoop location..."); 47 | addPage(page); 48 | } 49 | 50 | @Override 51 | public boolean performFinish() { 52 | page.performFinish(); 53 | return true; 54 | } 55 | 56 | }); 57 | 58 | dialog.create(); 59 | dialog.setBlockOnOpen(true); 60 | dialog.open(); 61 | 62 | super.run(); 63 | } 64 | } 65 | -------------------------------------------------------------------------------- /src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/actions/OpenNewMRClassWizardAction.java: -------------------------------------------------------------------------------- 1 | /** 2 | * Licensed to the Apache Software Foundation (ASF) under one 3 | * or more contributor license agreements. See the NOTICE file 4 | * distributed with this work for additional information 5 | * regarding copyright ownership. The ASF licenses this file 6 | * to you under the Apache License, Version 2.0 (the 7 | * "License"); you may not use this file except in compliance 8 | * with the License. You may obtain a copy of the License at 9 | * 10 | * http://www.apache.org/licenses/LICENSE-2.0 11 | * 12 | * Unless required by applicable law or agreed to in writing, software 13 | * distributed under the License is distributed on an "AS IS" BASIS, 14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 15 | * See the License for the specific language governing permissions and 16 | * limitations under the License. 
package org.apache.hadoop.eclipse.actions;

import java.util.logging.Logger;

import org.apache.hadoop.eclipse.NewDriverWizard;
import org.apache.hadoop.eclipse.NewMapperWizard;
import org.apache.hadoop.eclipse.NewReducerWizard;
import org.eclipse.jface.action.Action;
import org.eclipse.jface.viewers.StructuredSelection;
import org.eclipse.jface.window.Window;
import org.eclipse.jface.wizard.WizardDialog;
import org.eclipse.ui.INewWizard;
import org.eclipse.ui.IWorkbench;
import org.eclipse.ui.PlatformUI;
import org.eclipse.ui.cheatsheets.ICheatSheetAction;
import org.eclipse.ui.cheatsheets.ICheatSheetManager;

/**
 * Action to open a new MapReduce Class wizard (Mapper, Reducer or
 * Driver), typically triggered from a cheat sheet.
 */
public class OpenNewMRClassWizardAction extends Action implements
    ICheatSheetAction {

  static Logger log = Logger.getLogger(OpenNewMRClassWizardAction.class
      .getName());

  /**
   * Opens the wizard named by {@code params[0]} ("Mapper", "Reducer" or
   * "Driver") and notifies the cheat sheet manager of the outcome.
   */
  public void run(String[] params, ICheatSheetManager manager) {

    if ((params != null) && (params.length > 0)) {
      INewWizard wizard = getWizard(params[0]);
      if (wizard == null) {
        // Fix: getWizard() returns null for an unrecognized type name;
        // the original then called wizard.init() and threw a
        // NullPointerException. Report failure instead.
        notifyResult(false);
        return;
      }

      IWorkbench workbench = PlatformUI.getWorkbench();
      wizard.init(workbench, new StructuredSelection());
      WizardDialog dialog = new WizardDialog(PlatformUI.getWorkbench()
          .getActiveWorkbenchWindow().getShell(), wizard);
      dialog.create();
      dialog.open();

      // did the wizard succeed ?
      notifyResult(dialog.getReturnCode() == Window.OK);
    }
  }

  /**
   * Maps a wizard type name to a new wizard instance.
   *
   * @param typeName "Mapper", "Reducer" or "Driver"
   * @return the corresponding wizard, or null when the name is unknown
   */
  private INewWizard getWizard(String typeName) {
    if (typeName.equals("Mapper")) {
      return new NewMapperWizard();
    } else if (typeName.equals("Reducer")) {
      return new NewReducerWizard();
    } else if (typeName.equals("Driver")) {
      return new NewDriverWizard();
    } else {
      log.severe("Invalid Wizard requested");
      return null;
    }
  }

}
17 | */ 18 | 19 | package org.apache.hadoop.eclipse.actions; 20 | 21 | import org.apache.hadoop.eclipse.NewMapReduceProjectWizard; 22 | import org.eclipse.jface.action.Action; 23 | import org.eclipse.jface.viewers.StructuredSelection; 24 | import org.eclipse.jface.window.Window; 25 | import org.eclipse.jface.wizard.WizardDialog; 26 | import org.eclipse.swt.widgets.Shell; 27 | import org.eclipse.ui.IWorkbench; 28 | import org.eclipse.ui.PlatformUI; 29 | 30 | /** 31 | * Action to open a new Map/Reduce project. 32 | */ 33 | 34 | public class OpenNewMRProjectAction extends Action { 35 | 36 | @Override 37 | public void run() { 38 | IWorkbench workbench = PlatformUI.getWorkbench(); 39 | Shell shell = workbench.getActiveWorkbenchWindow().getShell(); 40 | NewMapReduceProjectWizard wizard = new NewMapReduceProjectWizard(); 41 | wizard.init(workbench, new StructuredSelection()); 42 | WizardDialog dialog = new WizardDialog(shell, wizard); 43 | dialog.create(); 44 | dialog.open(); 45 | // did the wizard succeed? 46 | notifyResult(dialog.getReturnCode() == Window.OK); 47 | } 48 | } 49 | -------------------------------------------------------------------------------- /src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/dfs/ActionProvider.java: -------------------------------------------------------------------------------- 1 | /** 2 | * Licensed to the Apache Software Foundation (ASF) under one 3 | * or more contributor license agreements. See the NOTICE file 4 | * distributed with this work for additional information 5 | * regarding copyright ownership. The ASF licenses this file 6 | * to you under the Apache License, Version 2.0 (the 7 | * "License"); you may not use this file except in compliance 8 | * with the License. 
package org.apache.hadoop.eclipse.dfs;

import org.apache.hadoop.eclipse.ImageLibrary;
import org.apache.hadoop.eclipse.actions.DFSActionImpl;
import org.eclipse.jface.action.Action;
import org.eclipse.jface.action.IMenuManager;
import org.eclipse.jface.resource.ImageDescriptor;
import org.eclipse.jface.viewers.ISelection;
import org.eclipse.jface.viewers.IStructuredSelection;
import org.eclipse.ui.IActionBars;
import org.eclipse.ui.PlatformUI;
import org.eclipse.ui.actions.ActionFactory;
import org.eclipse.ui.navigator.CommonActionProvider;
import org.eclipse.ui.navigator.ICommonActionConstants;
import org.eclipse.ui.navigator.ICommonActionExtensionSite;
import org.eclipse.ui.navigator.ICommonMenuConstants;

/**
 * Allows the user to delete and refresh items in the DFS tree
 */

public class ActionProvider extends CommonActionProvider {

  // NOTE(review): the extension site is held in a *static* field, so every
  // ActionProvider instance shares the site of whichever instance ran
  // init() first, and subsequent init() calls are rejected below. This
  // looks like a deliberate single-view assumption — TODO confirm it still
  // holds if more than one navigator view uses this provider.
  private static ICommonActionExtensionSite site;

  public ActionProvider() {
  }

  /* @inheritDoc */
  @Override
  public void init(ICommonActionExtensionSite site) {
    // Refuse re-initialization: keep the first site, warn on stderr.
    if (ActionProvider.site != null) {
      System.err.printf("%s: Multiple init()\n", this.getClass()
          .getCanonicalName());
      return;
    }
    super.init(site);
    ActionProvider.site = site;
  }

  /* @inheritDoc */
  @Override
  public void fillActionBars(IActionBars actionBars) {
    // Delete and Refresh are always available as global handlers.
    actionBars.setGlobalActionHandler(ActionFactory.DELETE.getId(),
        new DFSAction(DFSActions.DELETE));
    actionBars.setGlobalActionHandler(ActionFactory.REFRESH.getId(),
        new DFSAction(DFSActions.REFRESH));

    if (site == null)
      return;

    // The Open handler is only wired up when exactly one DFS file is
    // selected in the viewer.
    if ((site.getStructuredViewer().getSelection() instanceof IStructuredSelection)
        && (((IStructuredSelection) site.getStructuredViewer()
            .getSelection()).size() == 1)
        && (((IStructuredSelection) site.getStructuredViewer()
            .getSelection()).getFirstElement() instanceof DFSFile)) {

      actionBars.setGlobalActionHandler(ICommonActionConstants.OPEN,
          new DFSAction(DFSActions.OPEN));
    }

    actionBars.updateActionBars();
  }

  /* @inheritDoc */
  @Override
  public void fillContextMenu(IMenuManager menu) {
    /*
     * Actions on multiple selections
     */
    menu.appendToGroup(ICommonMenuConstants.GROUP_EDIT, new DFSAction(
        DFSActions.DELETE));

    menu.appendToGroup(ICommonMenuConstants.GROUP_OPEN, new DFSAction(
        DFSActions.REFRESH));

    menu.appendToGroup(ICommonMenuConstants.GROUP_NEW, new DFSAction(
        DFSActions.DOWNLOAD));

    if (site == null)
      return;

    ISelection isel = site.getStructuredViewer().getSelection();
    if (!(isel instanceof IStructuredSelection))
      return;

    /*
     * Actions on single selections only
     */

    IStructuredSelection issel = (IStructuredSelection) isel;
    if (issel.size() != 1)
      return;
    Object element = issel.getFirstElement();

    // Add type-specific entries depending on what kind of DFS tree node
    // is selected: file, folder, location, or the locations root.
    if (element instanceof DFSFile) {
      menu.appendToGroup(ICommonMenuConstants.GROUP_OPEN, new DFSAction(
          DFSActions.OPEN));

    } else if (element instanceof DFSFolder) {
      menu.appendToGroup(ICommonMenuConstants.GROUP_NEW, new DFSAction(
          DFSActions.MKDIR));
      menu.appendToGroup(ICommonMenuConstants.GROUP_NEW, new DFSAction(
          DFSActions.UPLOAD_FILES));
      menu.appendToGroup(ICommonMenuConstants.GROUP_NEW, new DFSAction(
          DFSActions.UPLOAD_DIR));

    } else if (element instanceof DFSLocation) {
      menu.appendToGroup(ICommonMenuConstants.GROUP_OPEN, new DFSAction(
          DFSActions.RECONNECT));

    } else if (element instanceof DFSLocationsRoot) {
      menu.appendToGroup(ICommonMenuConstants.GROUP_OPEN, new DFSAction(
          DFSActions.DISCONNECT));
    }

  }

  /**
   * Representation of an action on a DFS entry in the browser
   */
  public static class DFSAction extends Action {

    // Command id, used both as the action definition id and as the key
    // for the action's image in the ImageLibrary.
    private final String id;

    // Human-readable menu label.
    private final String title;

    // NOTE(review): this field is never assigned or read anywhere in this
    // class — apparently dead; left in place to keep the code unchanged.
    private DFSActions action;

    public DFSAction(String id, String title) {
      this.id = id;
      this.title = title;
    }

    public DFSAction(DFSActions action) {
      this.id = action.id;
      this.title = action.title;
    }

    /* @inheritDoc */
    @Override
    public String getText() {
      return this.title;
    }

    /* @inheritDoc */
    @Override
    public ImageDescriptor getImageDescriptor() {
      return ImageLibrary.get(getActionDefinitionId());
    }

    /* @inheritDoc */
    @Override
    public String getActionDefinitionId() {
      return id;
    }

    /* @inheritDoc */
    @Override
    public void run() {
      // Delegate to DFSActionImpl with the workbench's currently active
      // part and the shared site's current selection.
      DFSActionImpl action = new DFSActionImpl();
      action.setActivePart(this, PlatformUI.getWorkbench()
          .getActiveWorkbenchWindow().getActivePage().getActivePart());
      action.selectionChanged(this, site.getStructuredViewer()
          .getSelection());
      action.run(this);
    }

    /* @inheritDoc */
    @Override
    public boolean isEnabled() {
      return true;
    }
  }
}
--------------------------------------------------------------------------------
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.hadoop.eclipse.dfs;

import java.util.Locale;

/**
 * Enumeration of the actions available in the DFS browser, pairing a
 * human-readable title with a stable command identifier of the form
 * "dfs.browser.action.&lt;name&gt;".
 */
public enum DFSActions {

  DELETE("Delete"), REFRESH("Refresh"), DOWNLOAD("Download from DFS..."), OPEN(
      "View"), MKDIR("Create new directory..."), UPLOAD_FILES(
      "Upload files to DFS..."), UPLOAD_DIR("Upload directory to DFS..."), RECONNECT(
      "Reconnect"), DISCONNECT("Disconnect");

  /** Human-readable title of the action (shown in menus) */
  final String title;

  /** Stable command identifier: PREFIX + lower-cased enum name */
  final String id;

  private static final String PREFIX = "dfs.browser.action.";

  /**
   * Resolve an action from its command identifier.
   *
   * @param def the identifier, e.g. "dfs.browser.action.delete"
   * @return the matching action, or null if the identifier is null, does
   *         not start with the expected prefix, or names no known action
   */
  public static DFSActions getById(String def) {
    if ((def == null) || !def.startsWith(PREFIX))
      return null;
    try {
      // Locale.ROOT keeps the id <-> enum-name mapping stable regardless
      // of the default locale (e.g. the Turkish dotless-i problem that
      // breaks plain toUpperCase()).
      return valueOf(def.substring(PREFIX.length()).toUpperCase(Locale.ROOT));
    } catch (IllegalArgumentException iae) {
      // Unknown suffix: stay consistent with the null contract above
      // instead of propagating valueOf()'s exception.
      return null;
    }
  }

  DFSActions(String title) {
    this.title = title;
    this.id = PREFIX + this.name().toLowerCase(Locale.ROOT);
  }
}
--------------------------------------------------------------------------------
/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/dfs/DFSContent.java:
--------------------------------------------------------------------------------
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.hadoop.eclipse.dfs;

/**
 * Interface to define content entities in the DFS browser
 */
public interface DFSContent {

  /**
   * Whether this node may have children. Implementations may answer
   * optimistically: a node whose listing has not been loaded yet can
   * return true and resolve lazily (see DFSFolder).
   */
  boolean hasChildren();

  /**
   * Children of this node. Implementations that load asynchronously may
   * return a transient placeholder element while the listing is pending.
   */
  DFSContent[] getChildren();

  /**
   * Invalidate any cached state and ask the viewer to redisplay this node.
   */
  void refresh();

}
--------------------------------------------------------------------------------
/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/dfs/DFSContentProvider.java:
--------------------------------------------------------------------------------
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.
You may obtain a copy of the License at 9 | * 10 | * http://www.apache.org/licenses/LICENSE-2.0 11 | * 12 | * Unless required by applicable law or agreed to in writing, software 13 | * distributed under the License is distributed on an "AS IS" BASIS, 14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 15 | * See the License for the specific language governing permissions and 16 | * limitations under the License. 17 | */ 18 | 19 | package org.apache.hadoop.eclipse.dfs; 20 | 21 | import java.util.HashMap; 22 | import java.util.Map; 23 | 24 | import org.apache.hadoop.eclipse.ImageLibrary; 25 | import org.apache.hadoop.eclipse.server.HadoopServer; 26 | import org.apache.hadoop.eclipse.servers.ServerRegistry; 27 | import org.eclipse.jface.viewers.ILabelProvider; 28 | import org.eclipse.jface.viewers.ILabelProviderListener; 29 | import org.eclipse.jface.viewers.ITreeContentProvider; 30 | import org.eclipse.jface.viewers.StructuredViewer; 31 | import org.eclipse.jface.viewers.Viewer; 32 | import org.eclipse.swt.graphics.Image; 33 | import org.eclipse.swt.widgets.Display; 34 | 35 | /** 36 | * Handles viewing of DFS locations 37 | *

38 | * 39 | * The content handled by this provider is a tree: 40 | * 41 | * 42 | *
DFSLocationsRoot 43 | *
\_HadoopServer 44 | *
| \_DfsFolder 45 | *
| | \_DfsFile 46 | *
| \_DfsFolder 47 | *
| ... 48 | *
\_HadoopServer... 49 | *
50 | * 51 | * The code should not block here: blocking operations need to be done 52 | * asynchronously so as not to freeze the UI! 53 | */ 54 | public class DFSContentProvider implements ITreeContentProvider, 55 | ILabelProvider { 56 | 57 | /** 58 | * The viewer that displays this Tree content 59 | */ 60 | private Viewer viewer; 61 | 62 | private StructuredViewer sviewer; 63 | 64 | private Map rootFolders = 65 | new HashMap(); 66 | 67 | /** 68 | * Constructor: load resources (icons). 69 | */ 70 | public DFSContentProvider() { 71 | } 72 | 73 | private final DFSLocationsRoot locationsRoot = new DFSLocationsRoot(this); 74 | 75 | /* 76 | * ITreeContentProvider implementation 77 | */ 78 | 79 | /* @inheritDoc */ 80 | public Object[] getChildren(Object parent) { 81 | 82 | if (!(parent instanceof DFSContent)) 83 | return null; 84 | DFSContent content = (DFSContent) parent; 85 | return content.getChildren(); 86 | } 87 | 88 | public Object[] test(Object parentElement) { 89 | if (parentElement instanceof DFSLocationsRoot) { 90 | return ServerRegistry.getInstance().getServers().toArray(); 91 | 92 | } else if (parentElement instanceof HadoopServer) { 93 | final HadoopServer location = (HadoopServer) parentElement; 94 | Object root = rootFolders.get(location); 95 | if (root != null) 96 | return new Object[] { root }; 97 | 98 | return new Object[] { "Connecting to DFS..." 
}; 99 | 100 | } else if (parentElement instanceof DFSFolder) { 101 | DFSFolder folder = (DFSFolder) parentElement; 102 | return folder.getChildren(); 103 | } 104 | 105 | return new Object[] { "" }; 106 | } 107 | 108 | /* @inheritDoc */ 109 | public Object getParent(Object element) { 110 | 111 | if (element instanceof DFSPath) { 112 | return ((DFSPath) element).getParent(); 113 | 114 | } else if (element instanceof HadoopServer) { 115 | return locationsRoot; 116 | } 117 | 118 | return null; 119 | } 120 | 121 | /* @inheritDoc */ 122 | public boolean hasChildren(Object element) { 123 | if (element instanceof DFSContent) { 124 | DFSContent content = (DFSContent) element; 125 | return content.hasChildren(); 126 | } 127 | return false; 128 | } 129 | 130 | /* 131 | * IStructureContentProvider implementation 132 | */ 133 | 134 | /* @inheritDoc */ 135 | public Object[] getElements(final Object inputElement) { 136 | return new Object[] { locationsRoot }; 137 | // return ServerRegistry.getInstance().getServers().toArray(); 138 | } 139 | 140 | /* 141 | * ILabelProvider implementation 142 | */ 143 | 144 | /* @inheritDoc */ 145 | public Image getImage(Object element) { 146 | if (element instanceof DFSLocationsRoot) 147 | return ImageLibrary.getImage("dfs.browser.root.entry"); 148 | 149 | else if (element instanceof DFSLocation) 150 | return ImageLibrary.getImage("dfs.browser.location.entry"); 151 | 152 | else if (element instanceof DFSFolder) 153 | return ImageLibrary.getImage("dfs.browser.folder.entry"); 154 | 155 | else if (element instanceof DFSFile) 156 | return ImageLibrary.getImage("dfs.browser.file.entry"); 157 | 158 | return null; 159 | } 160 | 161 | /* @inheritDoc */ 162 | public String getText(Object element) { 163 | if (element instanceof DFSFile) 164 | return ((DFSFile) element).toDetailedString(); 165 | 166 | return element.toString(); 167 | } 168 | 169 | /* 170 | * IBaseLabelProvider implementation 171 | */ 172 | 173 | /* @inheritDoc */ 174 | public void 
addListener(ILabelProviderListener listener) { 175 | } 176 | 177 | /* @inheritDoc */ 178 | public void removeListener(ILabelProviderListener listener) { 179 | } 180 | 181 | /* @inheritDoc */ 182 | public boolean isLabelProperty(Object element, String property) { 183 | return false; 184 | } 185 | 186 | /* 187 | * IContentProvider implementation 188 | */ 189 | 190 | /* @inheritDoc */ 191 | public void dispose() { 192 | } 193 | 194 | /* @inheritDoc */ 195 | public void inputChanged(Viewer viewer, Object oldInput, Object newInput) { 196 | this.viewer = viewer; 197 | if ((viewer != null) && (viewer instanceof StructuredViewer)) 198 | this.sviewer = (StructuredViewer) viewer; 199 | else 200 | this.sviewer = null; 201 | } 202 | 203 | /* 204 | * Miscellaneous 205 | */ 206 | 207 | /** 208 | * Ask the viewer for this content to refresh 209 | */ 210 | void refresh() { 211 | // no display, nothing to update 212 | if (this.viewer == null) 213 | return; 214 | 215 | Display.getDefault().asyncExec(new Runnable() { 216 | public void run() { 217 | DFSContentProvider.this.viewer.refresh(); 218 | } 219 | }); 220 | } 221 | 222 | /** 223 | * Ask the viewer to refresh a single element 224 | * 225 | * @param content what to refresh 226 | */ 227 | void refresh(final DFSContent content) { 228 | if (this.sviewer != null) { 229 | Display.getDefault().asyncExec(new Runnable() { 230 | public void run() { 231 | DFSContentProvider.this.sviewer.refresh(content); 232 | } 233 | }); 234 | 235 | } else { 236 | refresh(); 237 | } 238 | } 239 | 240 | Viewer getViewer() { 241 | return this.viewer; 242 | } 243 | 244 | } 245 | -------------------------------------------------------------------------------- /src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/dfs/DFSFolder.java: -------------------------------------------------------------------------------- 1 | /** 2 | * Licensed to the Apache Software Foundation (ASF) under one 3 | * or more contributor license agreements. 
See the NOTICE file 4 | * distributed with this work for additional information 5 | * regarding copyright ownership. The ASF licenses this file 6 | * to you under the Apache License, Version 2.0 (the 7 | * "License"); you may not use this file except in compliance 8 | * with the License. You may obtain a copy of the License at 9 | * 10 | * http://www.apache.org/licenses/LICENSE-2.0 11 | * 12 | * Unless required by applicable law or agreed to in writing, software 13 | * distributed under the License is distributed on an "AS IS" BASIS, 14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 15 | * See the License for the specific language governing permissions and 16 | * limitations under the License. 17 | */ 18 | 19 | package org.apache.hadoop.eclipse.dfs; 20 | 21 | import java.io.File; 22 | import java.io.IOException; 23 | import java.util.ArrayList; 24 | import java.util.List; 25 | import java.util.logging.Logger; 26 | 27 | import org.apache.hadoop.eclipse.server.HadoopServer; 28 | import org.apache.hadoop.fs.FileStatus; 29 | import org.apache.hadoop.fs.Path; 30 | import org.eclipse.core.runtime.IProgressMonitor; 31 | import org.eclipse.core.runtime.IStatus; 32 | import org.eclipse.core.runtime.Status; 33 | import org.eclipse.core.runtime.jobs.Job; 34 | import org.eclipse.jface.dialogs.MessageDialog; 35 | 36 | /** 37 | * Local representation of a folder in the DFS. 
38 | * 39 | * The constructor creates an empty representation of the folder and spawn a 40 | * thread that will fill 41 | */ 42 | public class DFSFolder extends DFSPath implements DFSContent { 43 | 44 | static Logger log = Logger.getLogger(DFSFolder.class.getName()); 45 | 46 | private DFSContent[] children; 47 | 48 | protected DFSFolder(DFSContentProvider provider, HadoopServer location) 49 | throws IOException { 50 | 51 | super(provider, location); 52 | } 53 | 54 | private DFSFolder(DFSPath parent, Path path) { 55 | super(parent, path); 56 | } 57 | 58 | protected void loadDFSFolderChildren() throws IOException { 59 | List list = new ArrayList(); 60 | 61 | for (FileStatus status : getDFS().listStatus(this.getPath())) { 62 | if (status.isDir()) { 63 | list.add(new DFSFolder(this, status.getPath())); 64 | } else { 65 | list.add(new DFSFile(this, status.getPath())); 66 | } 67 | } 68 | 69 | this.children = list.toArray(new DFSContent[list.size()]); 70 | } 71 | 72 | /** 73 | * Upload the given file or directory into this DfsFolder 74 | * 75 | * @param file 76 | * @throws IOException 77 | */ 78 | public void upload(IProgressMonitor monitor, final File file) 79 | throws IOException { 80 | 81 | if (file.isDirectory()) { 82 | Path filePath = new Path(this.path, file.getName()); 83 | getDFS().mkdirs(filePath); 84 | DFSFolder newFolder = new DFSFolder(this, filePath); 85 | monitor.worked(1); 86 | for (File child : file.listFiles()) { 87 | if (monitor.isCanceled()) 88 | return; 89 | newFolder.upload(monitor, child); 90 | } 91 | 92 | } else if (file.isFile()) { 93 | Path filePath = new Path(this.path, file.getName()); 94 | DFSFile newFile = new DFSFile(this, filePath, file, monitor); 95 | 96 | } else { 97 | // XXX don't know what the file is? 
98 | } 99 | } 100 | 101 | /* @inheritDoc */ 102 | @Override 103 | public void downloadToLocalDirectory(IProgressMonitor monitor, File dir) { 104 | if (!dir.exists()) 105 | dir.mkdirs(); 106 | 107 | if (!dir.isDirectory()) { 108 | MessageDialog.openError(null, "Download to local file system", 109 | "Invalid directory location: \"" + dir + "\""); 110 | return; 111 | } 112 | 113 | File dfsPath = new File(this.getPath().toString()); 114 | File destination = new File(dir, dfsPath.getName()); 115 | 116 | if (!destination.exists()) { 117 | if (!destination.mkdir()) { 118 | MessageDialog.openError(null, "Download to local directory", 119 | "Unable to create directory " + destination.getAbsolutePath()); 120 | return; 121 | } 122 | } 123 | 124 | // Download all DfsPath children 125 | for (Object childObj : getChildren()) { 126 | if (childObj instanceof DFSPath) { 127 | ((DFSPath) childObj).downloadToLocalDirectory(monitor, destination); 128 | monitor.worked(1); 129 | } 130 | } 131 | } 132 | 133 | /* @inheritDoc */ 134 | @Override 135 | public int computeDownloadWork() { 136 | int work = 1; 137 | for (DFSContent child : getChildren()) { 138 | if (child instanceof DFSPath) 139 | work += ((DFSPath) child).computeDownloadWork(); 140 | } 141 | 142 | return work; 143 | } 144 | 145 | /** 146 | * Create a new sub directory into this directory 147 | * 148 | * @param folderName 149 | */ 150 | public void mkdir(String folderName) { 151 | try { 152 | getDFS().mkdirs(new Path(this.path, folderName)); 153 | } catch (IOException ioe) { 154 | ioe.printStackTrace(); 155 | } 156 | doRefresh(); 157 | } 158 | 159 | /* 160 | * Implementation of DFSContent 161 | */ 162 | 163 | /* @inheritDoc */ 164 | public boolean hasChildren() { 165 | if (this.children == null) 166 | return true; 167 | else 168 | return (this.children.length > 0); 169 | } 170 | 171 | /* @inheritDoc */ 172 | public DFSContent[] getChildren() { 173 | if (children == null) { 174 | new Job("Connecting to DFS " + location) { 175 | 
@Override 176 | protected IStatus run(IProgressMonitor monitor) { 177 | try { 178 | loadDFSFolderChildren(); 179 | return Status.OK_STATUS; 180 | 181 | } catch (IOException ioe) { 182 | children = 183 | new DFSContent[] { new DFSMessage("Error: " 184 | + ioe.getLocalizedMessage()) }; 185 | return Status.CANCEL_STATUS; 186 | 187 | } finally { 188 | // Under all circumstances, update the UI 189 | provider.refresh(DFSFolder.this); 190 | } 191 | } 192 | }.schedule(); 193 | 194 | return new DFSContent[] { new DFSMessage("Listing folder content...") }; 195 | } 196 | return this.children; 197 | } 198 | 199 | /* @inheritDoc */ 200 | @Override 201 | public void refresh() { 202 | this.children = null; 203 | this.doRefresh(); 204 | } 205 | 206 | /* @inheritDoc */ 207 | @Override 208 | public String toString() { 209 | return String.format("%s (%s)", super.toString(), 210 | this.getChildren().length); 211 | } 212 | 213 | } 214 | -------------------------------------------------------------------------------- /src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/dfs/DFSLocation.java: -------------------------------------------------------------------------------- 1 | /** 2 | * Licensed to the Apache Software Foundation (ASF) under one 3 | * or more contributor license agreements. See the NOTICE file 4 | * distributed with this work for additional information 5 | * regarding copyright ownership. The ASF licenses this file 6 | * to you under the Apache License, Version 2.0 (the 7 | * "License"); you may not use this file except in compliance 8 | * with the License. You may obtain a copy of the License at 9 | * 10 | * http://www.apache.org/licenses/LICENSE-2.0 11 | * 12 | * Unless required by applicable law or agreed to in writing, software 13 | * distributed under the License is distributed on an "AS IS" BASIS, 14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
15 | * See the License for the specific language governing permissions and 16 | * limitations under the License. 17 | */ 18 | 19 | package org.apache.hadoop.eclipse.dfs; 20 | 21 | import java.io.IOException; 22 | 23 | import org.apache.hadoop.eclipse.server.HadoopServer; 24 | import org.eclipse.core.runtime.IProgressMonitor; 25 | import org.eclipse.core.runtime.IStatus; 26 | import org.eclipse.core.runtime.Status; 27 | import org.eclipse.core.runtime.jobs.Job; 28 | 29 | /** 30 | * DFS Content representation of a HDFS location 31 | */ 32 | public class DFSLocation implements DFSContent { 33 | 34 | private final DFSContentProvider provider; 35 | 36 | private final HadoopServer location; 37 | 38 | private DFSContent rootFolder = null; 39 | 40 | DFSLocation(DFSContentProvider provider, HadoopServer server) { 41 | this.provider = provider; 42 | this.location = server; 43 | } 44 | 45 | /* @inheritDoc */ 46 | @Override 47 | public String toString() { 48 | return this.location.getLocationName(); 49 | } 50 | 51 | /* 52 | * Implementation of DFSContent 53 | */ 54 | 55 | /* @inheritDoc */ 56 | public DFSContent[] getChildren() { 57 | if (this.rootFolder == null) { 58 | /* 59 | * DfsFolder constructor might block as it contacts the NameNode: work 60 | * asynchronously here or this will potentially freeze the UI 61 | */ 62 | new Job("Connecting to DFS " + location) { 63 | @Override 64 | protected IStatus run(IProgressMonitor monitor) { 65 | try { 66 | rootFolder = new DFSFolder(provider, location); 67 | return Status.OK_STATUS; 68 | 69 | } catch (IOException ioe) { 70 | rootFolder = 71 | new DFSMessage("Error: " + ioe.getLocalizedMessage()); 72 | return Status.CANCEL_STATUS; 73 | 74 | } finally { 75 | // Under all circumstances, update the UI 76 | provider.refresh(DFSLocation.this); 77 | } 78 | } 79 | }.schedule(); 80 | 81 | return new DFSContent[] { new DFSMessage("Connecting to DFS " 82 | + toString()) }; 83 | } 84 | return new DFSContent[] { this.rootFolder }; 85 | } 86 | 87 
| /* @inheritDoc */ 88 | public boolean hasChildren() { 89 | return true; 90 | } 91 | 92 | /* @inheritDoc */ 93 | public void refresh() { 94 | this.rootFolder = null; 95 | this.provider.refresh(this); 96 | } 97 | 98 | /* 99 | * Actions 100 | */ 101 | 102 | /** 103 | * Refresh the location using a new connection 104 | */ 105 | public void reconnect() { 106 | this.refresh(); 107 | } 108 | } 109 | -------------------------------------------------------------------------------- /src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/dfs/DFSLocationsRoot.java: -------------------------------------------------------------------------------- 1 | /** 2 | * Licensed to the Apache Software Foundation (ASF) under one 3 | * or more contributor license agreements. See the NOTICE file 4 | * distributed with this work for additional information 5 | * regarding copyright ownership. The ASF licenses this file 6 | * to you under the Apache License, Version 2.0 (the 7 | * "License"); you may not use this file except in compliance 8 | * with the License. You may obtain a copy of the License at 9 | * 10 | * http://www.apache.org/licenses/LICENSE-2.0 11 | * 12 | * Unless required by applicable law or agreed to in writing, software 13 | * distributed under the License is distributed on an "AS IS" BASIS, 14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 15 | * See the License for the specific language governing permissions and 16 | * limitations under the License. 17 | */ 18 | 19 | package org.apache.hadoop.eclipse.dfs; 20 | 21 | import java.io.IOException; 22 | import java.util.HashMap; 23 | import java.util.Map; 24 | 25 | import org.apache.hadoop.eclipse.server.HadoopServer; 26 | import org.apache.hadoop.eclipse.servers.IHadoopServerListener; 27 | import org.apache.hadoop.eclipse.servers.ServerRegistry; 28 | import org.apache.hadoop.fs.FileSystem; 29 | 30 | /** 31 | * Representation of the root element containing all DFS servers. 
This 32 | * content registers an observer on Hadoop servers so as to update itself 33 | * when servers are updated. 34 | */ 35 | public class DFSLocationsRoot implements DFSContent, IHadoopServerListener { 36 | 37 | /** 38 | * 39 | */ 40 | private final DFSContentProvider provider; 41 | 42 | private Map map = 43 | new HashMap(); 44 | 45 | /** 46 | * Register a listeners to track DFS locations updates 47 | * 48 | * @param provider the content provider this content is the root of 49 | */ 50 | DFSLocationsRoot(DFSContentProvider provider) { 51 | this.provider = provider; 52 | ServerRegistry.getInstance().addListener(this); 53 | this.refresh(); 54 | } 55 | 56 | /* 57 | * Implementation of IHadoopServerListener 58 | */ 59 | 60 | /* @inheritDoc */ 61 | public synchronized void serverChanged(final HadoopServer location, 62 | final int type) { 63 | 64 | switch (type) { 65 | case ServerRegistry.SERVER_STATE_CHANGED: { 66 | this.provider.refresh(map.get(location)); 67 | break; 68 | } 69 | 70 | case ServerRegistry.SERVER_ADDED: { 71 | DFSLocation dfsLoc = new DFSLocation(provider, location); 72 | map.put(location, dfsLoc); 73 | this.provider.refresh(this); 74 | break; 75 | } 76 | 77 | case ServerRegistry.SERVER_REMOVED: { 78 | map.remove(location); 79 | this.provider.refresh(this); 80 | break; 81 | } 82 | } 83 | } 84 | 85 | /** 86 | * Recompute the map of Hadoop locations 87 | */ 88 | private synchronized void reloadLocations() { 89 | map.clear(); 90 | for (HadoopServer location : ServerRegistry.getInstance().getServers()) 91 | map.put(location, new DFSLocation(provider, location)); 92 | } 93 | 94 | /* @inheritDoc */ 95 | @Override 96 | public String toString() { 97 | return "DFS Locations"; 98 | } 99 | 100 | /* 101 | * Implementation of DFSContent 102 | */ 103 | 104 | /* @inheritDoc */ 105 | public synchronized DFSContent[] getChildren() { 106 | return this.map.values().toArray(new DFSContent[this.map.size()]); 107 | } 108 | 109 | /* @inheritDoc */ 110 | public boolean 
hasChildren() { 111 | return (this.map.size() > 0); 112 | } 113 | 114 | /* @inheritDoc */ 115 | public void refresh() { 116 | reloadLocations(); 117 | this.provider.refresh(this); 118 | } 119 | 120 | /* 121 | * Actions 122 | */ 123 | 124 | public void disconnect() { 125 | Thread closeThread = new Thread() { 126 | /* @inheritDoc */ 127 | @Override 128 | public void run() { 129 | try { 130 | System.out.printf("Closing all opened File Systems...\n"); 131 | FileSystem.closeAll(); 132 | System.out.printf("File Systems closed\n"); 133 | 134 | } catch (IOException ioe) { 135 | ioe.printStackTrace(); 136 | } 137 | } 138 | }; 139 | 140 | // Wait 5 seconds for the connections to be closed 141 | closeThread.start(); 142 | try { 143 | closeThread.join(5000); 144 | 145 | } catch (InterruptedException ie) { 146 | // Ignore 147 | } 148 | } 149 | 150 | } 151 | -------------------------------------------------------------------------------- /src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/dfs/DFSMessage.java: -------------------------------------------------------------------------------- 1 | /** 2 | * Licensed to the Apache Software Foundation (ASF) under one 3 | * or more contributor license agreements. See the NOTICE file 4 | * distributed with this work for additional information 5 | * regarding copyright ownership. The ASF licenses this file 6 | * to you under the Apache License, Version 2.0 (the 7 | * "License"); you may not use this file except in compliance 8 | * with the License. You may obtain a copy of the License at 9 | * 10 | * http://www.apache.org/licenses/LICENSE-2.0 11 | * 12 | * Unless required by applicable law or agreed to in writing, software 13 | * distributed under the License is distributed on an "AS IS" BASIS, 14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 15 | * See the License for the specific language governing permissions and 16 | * limitations under the License. 
17 | */ 18 | 19 | package org.apache.hadoop.eclipse.dfs; 20 | 21 | /** 22 | * DFS Content that displays a message. 23 | */ 24 | class DFSMessage implements DFSContent { 25 | 26 | private String message; 27 | 28 | DFSMessage(String message) { 29 | this.message = message; 30 | } 31 | 32 | /* @inheritDoc */ 33 | @Override 34 | public String toString() { 35 | return this.message; 36 | } 37 | 38 | /* 39 | * Implementation of DFSContent 40 | */ 41 | 42 | /* @inheritDoc */ 43 | public DFSContent[] getChildren() { 44 | return null; 45 | } 46 | 47 | /* @inheritDoc */ 48 | public boolean hasChildren() { 49 | return false; 50 | } 51 | 52 | /* @inheritDoc */ 53 | public void refresh() { 54 | // Nothing to do 55 | } 56 | 57 | } 58 | -------------------------------------------------------------------------------- /src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/dfs/DFSPath.java: -------------------------------------------------------------------------------- 1 | /** 2 | * Licensed to the Apache Software Foundation (ASF) under one 3 | * or more contributor license agreements. See the NOTICE file 4 | * distributed with this work for additional information 5 | * regarding copyright ownership. The ASF licenses this file 6 | * to you under the Apache License, Version 2.0 (the 7 | * "License"); you may not use this file except in compliance 8 | * with the License. You may obtain a copy of the License at 9 | * 10 | * http://www.apache.org/licenses/LICENSE-2.0 11 | * 12 | * Unless required by applicable law or agreed to in writing, software 13 | * distributed under the License is distributed on an "AS IS" BASIS, 14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 15 | * See the License for the specific language governing permissions and 16 | * limitations under the License. 
17 | */ 18 | 19 | package org.apache.hadoop.eclipse.dfs; 20 | 21 | import java.io.File; 22 | import java.io.IOException; 23 | import java.util.logging.Logger; 24 | 25 | import org.apache.hadoop.hdfs.DistributedFileSystem; 26 | import org.apache.hadoop.eclipse.ErrorMessageDialog; 27 | import org.apache.hadoop.eclipse.server.ConfProp; 28 | import org.apache.hadoop.eclipse.server.HadoopServer; 29 | import org.apache.hadoop.fs.FileSystem; 30 | import org.apache.hadoop.fs.Path; 31 | import org.eclipse.core.runtime.IProgressMonitor; 32 | import org.eclipse.jface.dialogs.MessageDialog; 33 | 34 | /** 35 | * DFS Path handling for DFS 36 | */ 37 | public abstract class DFSPath implements DFSContent { 38 | 39 | protected final DFSContentProvider provider; 40 | 41 | protected HadoopServer location; 42 | 43 | private DistributedFileSystem dfs = null; 44 | 45 | protected final Path path; 46 | 47 | protected final DFSPath parent; 48 | 49 | /** 50 | * For debugging purpose 51 | */ 52 | static Logger log = Logger.getLogger(DFSPath.class.getName()); 53 | 54 | /** 55 | * Create a path representation for the given location in the given viewer 56 | * 57 | * @param location 58 | * @param path 59 | * @param viewer 60 | */ 61 | public DFSPath(DFSContentProvider provider, HadoopServer location) 62 | throws IOException { 63 | 64 | this.provider = provider; 65 | this.location = location; 66 | this.path = new Path("/"); 67 | this.parent = null; 68 | } 69 | 70 | /** 71 | * Create a sub-path representation for the given parent path 72 | * 73 | * @param parent 74 | * @param path 75 | */ 76 | protected DFSPath(DFSPath parent, Path path) { 77 | this.provider = parent.provider; 78 | this.location = parent.location; 79 | this.dfs = parent.dfs; 80 | this.parent = parent; 81 | this.path = path; 82 | } 83 | 84 | protected void dispose() { 85 | // Free the DFS connection 86 | } 87 | 88 | /* @inheritDoc */ 89 | @Override 90 | public String toString() { 91 | if (path.equals("/")) { 92 | return 
location.getConfProp(ConfProp.FS_DEFAULT_URI); 93 | 94 | } else { 95 | return this.path.getName(); 96 | } 97 | } 98 | 99 | /** 100 | * Does a recursive delete of the remote directory tree at this node. 101 | */ 102 | public void delete() { 103 | try { 104 | getDFS().delete(this.path, true); 105 | 106 | } catch (IOException e) { 107 | e.printStackTrace(); 108 | MessageDialog.openWarning(null, "Delete file", 109 | "Unable to delete file \"" + this.path + "\"\n" + e); 110 | } 111 | } 112 | 113 | public DFSPath getParent() { 114 | return parent; 115 | } 116 | 117 | public abstract void refresh(); 118 | 119 | /** 120 | * Refresh the UI element for this content 121 | */ 122 | public void doRefresh() { 123 | provider.refresh(this); 124 | } 125 | 126 | /** 127 | * Copy the DfsPath to the given local directory 128 | * 129 | * @param directory the local directory 130 | */ 131 | public abstract void downloadToLocalDirectory(IProgressMonitor monitor, 132 | File dir); 133 | 134 | public Path getPath() { 135 | return this.path; 136 | } 137 | 138 | /** 139 | * Gets a connection to the DFS 140 | * 141 | * @return a connection to the DFS 142 | * @throws IOException 143 | */ 144 | DistributedFileSystem getDFS() throws IOException { 145 | if (this.dfs == null) { 146 | FileSystem fs = location.getDFS(); 147 | if (!(fs instanceof DistributedFileSystem)) { 148 | ErrorMessageDialog.display("DFS Browser", 149 | "The DFS Browser cannot browse anything else " 150 | + "but a Distributed File System!"); 151 | throw new IOException("DFS Browser expects a DistributedFileSystem!"); 152 | } 153 | this.dfs = (DistributedFileSystem) fs; 154 | } 155 | return this.dfs; 156 | } 157 | 158 | public abstract int computeDownloadWork(); 159 | 160 | } 161 | -------------------------------------------------------------------------------- /src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/launch/HadoopApplicationLaunchShortcut.java: 
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.hadoop.eclipse.launch;

import java.util.ArrayList;
import java.util.List;
import java.util.logging.Logger;

import org.apache.hadoop.eclipse.servers.RunOnHadoopWizard;
import org.eclipse.core.resources.IFile;
import org.eclipse.core.resources.IResource;
import org.eclipse.core.runtime.CoreException;
import org.eclipse.debug.core.ILaunchConfiguration;
import org.eclipse.debug.core.ILaunchConfigurationType;
import org.eclipse.debug.core.ILaunchConfigurationWorkingCopy;
import org.eclipse.jdt.core.IJavaProject;
import org.eclipse.jdt.core.IType;
import org.eclipse.jdt.core.JavaCore;
import org.eclipse.jdt.debug.ui.launchConfigurations.JavaApplicationLaunchShortcut;
import org.eclipse.jdt.launching.IJavaLaunchConfigurationConstants;
import org.eclipse.jdt.launching.IRuntimeClasspathEntry;
import org.eclipse.jdt.launching.JavaRuntime;
import org.eclipse.jface.wizard.IWizard;
import org.eclipse.jface.wizard.WizardDialog;
import org.eclipse.swt.widgets.Display;
import org.eclipse.swt.widgets.Shell;

/**
 * Add a shortcut "Run on Hadoop" to the Run menu
 */

public class HadoopApplicationLaunchShortcut extends
    JavaApplicationLaunchShortcut {

  static Logger log =
      Logger.getLogger(HadoopApplicationLaunchShortcut.class.getName());

  // private ActionDelegate delegate = new RunOnHadoopActionDelegate();

  public HadoopApplicationLaunchShortcut() {
  }

  /**
   * Finds (or creates) the launch configuration for the given main type,
   * replaces its default classpath with the project's runtime classpath,
   * then lets the user pick a Hadoop location through the
   * {@link RunOnHadoopWizard} before saving the configuration.
   *
   * @return the tuned working copy, or null when the user cancels the
   *         wizard or an error occurs
   */
  /* @inheritDoc */
  @Override
  protected ILaunchConfiguration findLaunchConfiguration(IType type,
      ILaunchConfigurationType configType) {

    // Find an existing or create a launch configuration (Standard way)
    ILaunchConfiguration iConf =
        super.findLaunchConfiguration(type, configType);
    if (iConf == null) iConf = super.createConfiguration(type);
    ILaunchConfigurationWorkingCopy iConfWC;
    try {
      /*
       * Tune the default launch configuration: setup run-time classpath
       * manually
       */
      iConfWC = iConf.getWorkingCopy();

      iConfWC.setAttribute(
          IJavaLaunchConfigurationConstants.ATTR_DEFAULT_CLASSPATH, false);

      // Fixed raw-type usage: the classpath attribute is a list of
      // classpath-entry mementos, which are Strings.
      List<String> classPath = new ArrayList<String>();
      IResource resource = type.getResource();
      IJavaProject project =
          (IJavaProject) resource.getProject().getNature(JavaCore.NATURE_ID);
      IRuntimeClasspathEntry cpEntry =
          JavaRuntime.newDefaultProjectClasspathEntry(project);
      classPath.add(0, cpEntry.getMemento());

      iConfWC.setAttribute(IJavaLaunchConfigurationConstants.ATTR_CLASSPATH,
          classPath);

    } catch (CoreException e) {
      e.printStackTrace();
      // FIXME Error dialog
      return null;
    }

    /*
     * Update the selected configuration with a specific Hadoop location
     * target
     */
    IResource resource = type.getResource();
    if (!(resource instanceof IFile))
      return null;
    RunOnHadoopWizard wizard =
        new RunOnHadoopWizard((IFile) resource, iConfWC);
    WizardDialog dialog =
        new WizardDialog(Display.getDefault().getActiveShell(), wizard);

    dialog.create();
    dialog.setBlockOnOpen(true);
    if (dialog.open() != WizardDialog.OK)
      return null;

    try {
      iConfWC.doSave();

    } catch (CoreException e) {
      e.printStackTrace();
      // FIXME Error dialog
      return null;
    }

    return iConfWC;
  }

  /**
   * Was used to run the RunOnHadoopWizard inside and provide it a
   * ProgressMonitor
   */
  static class Dialog extends WizardDialog {
    public Dialog(Shell parentShell, IWizard newWizard) {
      super(parentShell, newWizard);
    }

    @Override
    public void create() {
      super.create();

      ((RunOnHadoopWizard) getWizard())
          .setProgressMonitor(getProgressMonitor());
    }
  }
}
15 | * See the License for the specific language governing permissions and 16 | * limitations under the License. 17 | */ 18 | 19 | package org.apache.hadoop.eclipse.launch; 20 | 21 | import org.eclipse.core.runtime.jobs.ISchedulingRule; 22 | 23 | public class MutexRule implements ISchedulingRule { 24 | private final String id; 25 | 26 | public MutexRule(String id) { 27 | this.id = id; 28 | } 29 | 30 | public boolean contains(ISchedulingRule rule) { 31 | return (rule instanceof MutexRule) && ((MutexRule) rule).id.equals(id); 32 | } 33 | 34 | public boolean isConflicting(ISchedulingRule rule) { 35 | return (rule instanceof MutexRule) && ((MutexRule) rule).id.equals(id); 36 | } 37 | } 38 | -------------------------------------------------------------------------------- /src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/launch/StartHadoopLaunchTabGroup.java: -------------------------------------------------------------------------------- 1 | /** 2 | * Licensed to the Apache Software Foundation (ASF) under one 3 | * or more contributor license agreements. See the NOTICE file 4 | * distributed with this work for additional information 5 | * regarding copyright ownership. The ASF licenses this file 6 | * to you under the Apache License, Version 2.0 (the 7 | * "License"); you may not use this file except in compliance 8 | * with the License. You may obtain a copy of the License at 9 | * 10 | * http://www.apache.org/licenses/LICENSE-2.0 11 | * 12 | * Unless required by applicable law or agreed to in writing, software 13 | * distributed under the License is distributed on an "AS IS" BASIS, 14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 15 | * See the License for the specific language governing permissions and 16 | * limitations under the License. 
17 | */ 18 | 19 | package org.apache.hadoop.eclipse.launch; 20 | 21 | import org.eclipse.debug.ui.AbstractLaunchConfigurationTabGroup; 22 | import org.eclipse.debug.ui.CommonTab; 23 | import org.eclipse.debug.ui.ILaunchConfigurationDialog; 24 | import org.eclipse.debug.ui.ILaunchConfigurationTab; 25 | import org.eclipse.jdt.debug.ui.launchConfigurations.JavaArgumentsTab; 26 | import org.eclipse.jdt.debug.ui.launchConfigurations.JavaClasspathTab; 27 | import org.eclipse.jdt.debug.ui.launchConfigurations.JavaJRETab; 28 | 29 | /** 30 | * Create the tab group for the dialog window for starting a Hadoop job. 31 | */ 32 | 33 | public class StartHadoopLaunchTabGroup extends 34 | AbstractLaunchConfigurationTabGroup { 35 | 36 | public StartHadoopLaunchTabGroup() { 37 | // TODO Auto-generated constructor stub 38 | } 39 | 40 | /** 41 | * TODO(jz) consider the appropriate tabs for this case 42 | */ 43 | public void createTabs(ILaunchConfigurationDialog dialog, String mode) { 44 | setTabs(new ILaunchConfigurationTab[] { new JavaArgumentsTab(), 45 | new JavaJRETab(), new JavaClasspathTab(), new CommonTab() }); 46 | } 47 | } 48 | -------------------------------------------------------------------------------- /src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/preferences/MapReducePreferencePage.java: -------------------------------------------------------------------------------- 1 | /** 2 | * Licensed to the Apache Software Foundation (ASF) under one 3 | * or more contributor license agreements. See the NOTICE file 4 | * distributed with this work for additional information 5 | * regarding copyright ownership. The ASF licenses this file 6 | * to you under the Apache License, Version 2.0 (the 7 | * "License"); you may not use this file except in compliance 8 | * with the License. 
You may obtain a copy of the License at 9 | * 10 | * http://www.apache.org/licenses/LICENSE-2.0 11 | * 12 | * Unless required by applicable law or agreed to in writing, software 13 | * distributed under the License is distributed on an "AS IS" BASIS, 14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 15 | * See the License for the specific language governing permissions and 16 | * limitations under the License. 17 | */ 18 | package org.apache.hadoop.eclipse.preferences; 19 | 20 | import org.apache.hadoop.eclipse.Activator; 21 | import org.eclipse.jface.preference.DirectoryFieldEditor; 22 | import org.eclipse.jface.preference.FieldEditorPreferencePage; 23 | import org.eclipse.ui.IWorkbench; 24 | import org.eclipse.ui.IWorkbenchPreferencePage; 25 | 26 | /** 27 | * This class represents a preference page that is contributed to the 28 | * Preferences dialog. By sub-classing FieldEditorPreferencePage, 29 | * we can use the field support built into JFace that allows us to create a 30 | * page that is small and knows how to save, restore and apply itself. 31 | * 32 | *

33 | * This page is used to modify preferences only. They are stored in the 34 | * preference store that belongs to the main plug-in class. That way, 35 | * preferences can be accessed directly via the preference store. 36 | */ 37 | 38 | public class MapReducePreferencePage extends FieldEditorPreferencePage 39 | implements IWorkbenchPreferencePage { 40 | 41 | public MapReducePreferencePage() { 42 | super(GRID); 43 | setPreferenceStore(Activator.getDefault().getPreferenceStore()); 44 | setTitle("Hadoop Map/Reduce Tools"); 45 | // setDescription("Hadoop Map/Reduce Preferences"); 46 | } 47 | 48 | /** 49 | * Creates the field editors. Field editors are abstractions of the common 50 | * GUI blocks needed to manipulate various types of preferences. Each field 51 | * editor knows how to save and restore itself. 52 | */ 53 | @Override 54 | public void createFieldEditors() { 55 | addField(new DirectoryFieldEditor(PreferenceConstants.P_PATH, 56 | "&Hadoop installation directory:", getFieldEditorParent())); 57 | 58 | } 59 | 60 | /* @inheritDoc */ 61 | public void init(IWorkbench workbench) { 62 | } 63 | 64 | } 65 | -------------------------------------------------------------------------------- /src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/preferences/PreferenceConstants.java: -------------------------------------------------------------------------------- 1 | /** 2 | * Licensed to the Apache Software Foundation (ASF) under one 3 | * or more contributor license agreements. See the NOTICE file 4 | * distributed with this work for additional information 5 | * regarding copyright ownership. The ASF licenses this file 6 | * to you under the Apache License, Version 2.0 (the 7 | * "License"); you may not use this file except in compliance 8 | * with the License. 
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.hadoop.eclipse.preferences;

/**
 * Constant definitions for plug-in preferences.
 *
 * These keys name entries in the plug-in's preference store (see
 * MapReducePreferencePage, which edits them).
 */
public class PreferenceConstants {

  // Preference-store key for the local Hadoop installation directory.
  public static final String P_PATH = "pathPreference";

  // Unused example keys kept from the preference-page template:
  // public static final String P_BOOLEAN = "booleanPreference";
  //
  // public static final String P_CHOICE = "choicePreference";
  //
  // public static final String P_STRING = "stringPreference";
  //
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.hadoop.eclipse.preferences;

import org.eclipse.core.runtime.preferences.AbstractPreferenceInitializer;

/**
 * Class used to initialize default preference values.
 *
 * Registered through the org.eclipse.core.runtime.preferences extension
 * point; the body is intentionally empty because no preference currently
 * needs a non-empty default.
 */
public class PreferenceInitializer extends AbstractPreferenceInitializer {

  /* @inheritDoc */
  @Override
  public void initializeDefaultPreferences() {
    // No defaults to set.
  }

}
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.hadoop.eclipse.server;

import java.util.HashMap;
import java.util.Map;

import org.apache.hadoop.conf.Configuration;

/**
 * Properties the plug-in stores for a Hadoop location. "Internal"
 * properties (first constructor argument true) are namespaced with an
 * "eclipse.plug-in." prefix so they do not collide with real Hadoop
 * configuration keys.
 */
public enum ConfProp {

  /**
   * Property name for the runtime framework for executing MapReduce jobs.
   * Can be one of local, classic or yarn
   */
  PI_MRFRAMEWORK_NAME(false, "mapreduce.framework.name", "local"),

  /**
   * Property name for the applications manager address in the RM.
   */
  PI_RESOURCEMANAGER_ADDRESS(false, "yarn.resourcemanager.address", "0.0.0.0:8032"),

  /**
   * Property name for the Hadoop location name
   */
  PI_LOCATION_NAME(true, "location.name", "New Hadoop location"),

  /**
   * Property name for the master host name (the Job tracker)
   */
  PI_JOB_TRACKER_HOST(true, "jobtracker.host", "localhost"),

  /**
   * Property name for the DFS master host name (the Name node)
   */
  PI_NAME_NODE_HOST(true, "namenode.host", "localhost"),

  /**
   * Property name for the installation directory on the master node
   */
  // PI_INSTALL_DIR(true, "install.dir", "/dir/hadoop-version/"),
  /**
   * User name to use for Hadoop operations
   */
  PI_USER_NAME(true, "user.name", System.getProperty("user.name",
      "who are you?")),

  /**
   * Property name for SOCKS proxy activation
   */
  PI_SOCKS_PROXY_ENABLE(true, "socks.proxy.enable", "no"),

  /**
   * Property name for the SOCKS proxy host
   */
  PI_SOCKS_PROXY_HOST(true, "socks.proxy.host", "host"),

  /**
   * Property name for the SOCKS proxy port
   */
  PI_SOCKS_PROXY_PORT(true, "socks.proxy.port", "1080"),

  /**
   * TCP port number for the name node
   */
  PI_NAME_NODE_PORT(true, "namenode.port", "50040"),

  /**
   * TCP port number for the job tracker
   */
  PI_JOB_TRACKER_PORT(true, "jobtracker.port", "50020"),

  /**
   * Are the Map/Reduce and the Distributed FS masters hosted on the same
   * machine?
   */
  PI_COLOCATE_MASTERS(true, "masters.colocate", "yes"),

  /**
   * Property name for naming the job tracker (URI). This property is related
   * to {@link #PI_MASTER_HOST_NAME}
   */
  JOB_TRACKER_URI(false, "mapreduce.job.tracker", "localhost:50020"),

  /**
   * Property name for naming the default file system (URI).
   */
  FS_DEFAULT_URI(false, "fs.default.name", "hdfs://localhost:50040/"),

  /**
   * Property name for the default socket factory:
   */
  SOCKET_FACTORY_DEFAULT(false, "hadoop.rpc.socket.factory.class.default",
      "org.apache.hadoop.net.StandardSocketFactory"),

  /**
   * Property name for the SOCKS server URI.
   */
  SOCKS_SERVER(false, "hadoop.socks.server", "host:1080"),

  ;

  /**
   * Registry of property name -> ConfProp, filled in by the enum
   * constructors. (Parameterized the formerly raw Map/HashMap types.)
   */
  private static Map<String, ConfProp> map;

  private static synchronized void registerProperty(String name,
      ConfProp prop) {

    // Lazily created here because enum constructors run before any static
    // initializer of the enum class could populate it.
    if (ConfProp.map == null)
      ConfProp.map = new HashMap<String, ConfProp>();

    ConfProp.map.put(name, prop);
  }

  /**
   * Look up the ConfProp whose (possibly prefixed) property name matches.
   *
   * @param propName the full property name
   * @return the matching constant, or null when no constant uses that name
   */
  public static ConfProp getByName(String propName) {
    return map.get(propName);
  }

  // Full property name, including the "eclipse.plug-in." prefix for
  // internal properties.
  public final String name;

  // Default value used when the configuration does not define the property.
  public final String defVal;

  ConfProp(boolean internal, String name, String defVal) {
    if (internal)
      name = "eclipse.plug-in." + name;
    this.name = name;
    this.defVal = defVal;

    ConfProp.registerProperty(name, this);
  }

  String get(Configuration conf) {
    return conf.get(name);
  }

  void set(Configuration conf, String value) {
    assert value != null;
    conf.set(name, value);
  }

}
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.hadoop.eclipse.server;

import org.eclipse.core.runtime.IProgressMonitor;
import org.eclipse.swt.graphics.Image;
import org.eclipse.swt.widgets.Composite;
import org.eclipse.ui.IEditorInput;
import org.eclipse.ui.IEditorPart;
import org.eclipse.ui.IEditorSite;
import org.eclipse.ui.IPropertyListener;
import org.eclipse.ui.IWorkbenchPartSite;
import org.eclipse.ui.PartInitException;

/**
 * Skeletal, unimplemented IEditorPart: every method is an auto-generated
 * stub that returns null/false or does nothing. The getters return null,
 * is* queries return false, and the part never reports itself dirty. Kept
 * as a placeholder; NOTE(review): no real behavior is implemented here.
 */
public class HadoopPathPage implements IEditorPart {

  public IEditorInput getEditorInput() {
    // TODO Auto-generated method stub
    return null;
  }

  public IEditorSite getEditorSite() {
    // TODO Auto-generated method stub
    return null;
  }

  public void init(IEditorSite site, IEditorInput input)
      throws PartInitException {
    // TODO Auto-generated method stub

  }

  public void addPropertyListener(IPropertyListener listener) {
    // TODO Auto-generated method stub

  }

  public void createPartControl(Composite parent) {
    // TODO Auto-generated method stub

  }

  public void dispose() {
    // TODO Auto-generated method stub

  }

  public IWorkbenchPartSite getSite() {
    // TODO Auto-generated method stub
    return null;
  }

  public String getTitle() {
    // TODO Auto-generated method stub
    return null;
  }

  public Image getTitleImage() {
    // TODO Auto-generated method stub
    return null;
  }

  public String getTitleToolTip() {
    // TODO Auto-generated method stub
    return null;
  }

  public void removePropertyListener(IPropertyListener listener) {
    // TODO Auto-generated method stub

  }

  public void setFocus() {
    // TODO Auto-generated method stub

  }

  public Object getAdapter(Class adapter) {
    // TODO Auto-generated method stub
    return null;
  }

  public void doSave(IProgressMonitor monitor) {
    // TODO Auto-generated method stub

  }

  public void doSaveAs() {
    // TODO Auto-generated method stub

  }

  public boolean isDirty() {
    // TODO Auto-generated method stub
    return false;
  }

  public boolean isSaveAsAllowed() {
    // TODO Auto-generated method stub
    return false;
  }

  public boolean isSaveOnCloseNeeded() {
    // TODO Auto-generated method stub
    return false;
  }

}
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.hadoop.eclipse.server;

/**
 * Interface for updating/adding jobs to the MapReduce Server view.
 */
public interface IJobListener {

  /** Notification that an already-known job's state has changed. */
  void jobChanged(HadoopJob job);

  /** Notification that a new job appeared. */
  void jobAdded(HadoopJob job);

  /** Notification that a job is gone. */
  void jobRemoved(HadoopJob job);

  /** Notification that publishing of the given JAR module started. */
  void publishStart(JarModule jar);

  /** Notification that publishing of the given JAR module finished. */
  void publishDone(JarModule jar);

}
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.hadoop.eclipse.server;

import java.io.File;
import java.util.logging.Logger;

import org.apache.hadoop.eclipse.Activator;
import org.apache.hadoop.eclipse.ErrorMessageDialog;
import org.eclipse.core.resources.IResource;
import org.eclipse.core.runtime.IProgressMonitor;
import org.eclipse.core.runtime.Path;
import org.eclipse.jdt.core.ICompilationUnit;
import org.eclipse.jdt.core.IJavaElement;
import org.eclipse.jdt.core.IType;
import org.eclipse.jdt.ui.jarpackager.IJarExportRunnable;
import org.eclipse.jdt.ui.jarpackager.JarPackageData;
import org.eclipse.jface.operation.IRunnableWithProgress;
import org.eclipse.swt.widgets.Display;
import org.eclipse.ui.PlatformUI;

/**
 * Methods for interacting with the jar file containing the
 * Mapper/Reducer/Driver classes for a MapReduce job.
 */

public class JarModule implements IRunnableWithProgress {

  static Logger log = Logger.getLogger(JarModule.class.getName());

  // Resource (Java class with a main()) to package.
  private IResource resource;

  // Result of run(): the generated JAR, or null if packaging failed.
  private File jarFile;

  public JarModule(IResource resource) {
    this.resource = resource;
  }

  /** Display name of the module: "project/resource". */
  public String getName() {
    String projectName = resource.getProject().getName();
    return projectName + "/" + resource.getName();
  }

  /**
   * Creates a JAR file containing the resource (a Java class with a
   * main()) and all associated project resources; the result is retrieved
   * afterwards through {@link #getJarFile()}.
   *
   * @param monitor progress monitor for the export
   */
  public void run(IProgressMonitor monitor) {

    log.fine("Build jar");
    JarPackageData jarSpec = new JarPackageData();

    // Export everything (sources, classes, output folders), overwriting
    // any stale file at the target location.
    jarSpec.setOverwrite(true);
    jarSpec.setExportJavaFiles(true);
    jarSpec.setExportClassFiles(true);
    jarSpec.setExportOutputFolders(true);

    try {
      // IJavaProject project =
      // (IJavaProject) resource.getProject().getNature(JavaCore.NATURE_ID);

      // check this is the case before letting this method get called
      Object element = resource.getAdapter(IJavaElement.class);
      IType mainType = ((ICompilationUnit) element).findPrimaryType();
      jarSpec.setManifestMainClass(mainType);

      // Create a temporary JAR file name inside the plug-in state area
      File stateDir = Activator.getDefault().getStateLocation().toFile();

      String namePrefix =
          String.format("%s_%s-", resource.getProject().getName(), resource
              .getName());
      File targetJar = File.createTempFile(namePrefix, ".jar", stateDir);
      jarSpec.setJarLocation(new Path(targetJar.getAbsolutePath()));

      jarSpec.setElements(resource.getProject().members(IResource.FILE));
      IJarExportRunnable exporter =
          jarSpec.createJarExportRunnable(Display.getDefault()
              .getActiveShell());
      exporter.run(monitor);

      this.jarFile = targetJar;

    } catch (Exception e) {
      e.printStackTrace();
      throw new RuntimeException(e);
    }
  }

  /**
   * Allow the retrieval of the resulting JAR file
   *
   * @return the generated JAR file
   */
  public File getJarFile() {
    return this.jarFile;
  }

  /**
   * Static way to create a JAR package for the given resource, showing a
   * progress bar while the export runs.
   *
   * @param resource the resource to package
   * @return the generated JAR, or null on failure (an error dialog is
   *         shown in that case)
   */
  public static File createJarPackage(IResource resource) {

    JarModule jarModule = new JarModule(resource);
    try {
      PlatformUI.getWorkbench().getProgressService().run(false, true,
          jarModule);

    } catch (Exception e) {
      e.printStackTrace();
      return null;
    }

    File result = jarModule.getJarFile();
    if (result == null) {
      ErrorMessageDialog.display("Run on Hadoop",
          "Unable to create or locate the JAR file for the Job");
      return null;
    }

    return result;
  }

}
-------------------------------------------------------------------------------- /src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/servers/HadoopServerSelectionListContentProvider.java: -------------------------------------------------------------------------------- 1 | /** 2 | * Licensed to the Apache Software Foundation (ASF) under one 3 | * or more contributor license agreements. See the NOTICE file 4 | * distributed with this work for additional information 5 | * regarding copyright ownership. The ASF licenses this file 6 | * to you under the Apache License, Version 2.0 (the 7 | * "License"); you may not use this file except in compliance 8 | * with the License. You may obtain a copy of the License at 9 | * 10 | * http://www.apache.org/licenses/LICENSE-2.0 11 | * 12 | * Unless required by applicable law or agreed to in writing, software 13 | * distributed under the License is distributed on an "AS IS" BASIS, 14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 15 | * See the License for the specific language governing permissions and 16 | * limitations under the License. 17 | */ 18 | 19 | package org.apache.hadoop.eclipse.servers; 20 | 21 | import org.apache.hadoop.eclipse.server.HadoopServer; 22 | import org.eclipse.jface.viewers.IContentProvider; 23 | import org.eclipse.jface.viewers.ILabelProviderListener; 24 | import org.eclipse.jface.viewers.IStructuredContentProvider; 25 | import org.eclipse.jface.viewers.ITableLabelProvider; 26 | import org.eclipse.jface.viewers.Viewer; 27 | import org.eclipse.swt.graphics.Image; 28 | 29 | /** 30 | * Provider that enables selection of a predefined Hadoop server. 
31 | */ 32 | 33 | public class HadoopServerSelectionListContentProvider implements 34 | IContentProvider, ITableLabelProvider, IStructuredContentProvider { 35 | public void dispose() { 36 | 37 | } 38 | 39 | public void inputChanged(Viewer viewer, Object oldInput, Object newInput) { 40 | 41 | } 42 | 43 | public Image getColumnImage(Object element, int columnIndex) { 44 | return null; 45 | } 46 | 47 | public String getColumnText(Object element, int columnIndex) { 48 | if (element instanceof HadoopServer) { 49 | HadoopServer location = (HadoopServer) element; 50 | if (columnIndex == 0) { 51 | return location.getLocationName(); 52 | 53 | } else if (columnIndex == 1) { 54 | return location.getMasterHostName(); 55 | } 56 | } 57 | 58 | return element.toString(); 59 | } 60 | 61 | public void addListener(ILabelProviderListener listener) { 62 | 63 | } 64 | 65 | public boolean isLabelProperty(Object element, String property) { 66 | return false; 67 | } 68 | 69 | public void removeListener(ILabelProviderListener listener) { 70 | 71 | } 72 | 73 | public Object[] getElements(Object inputElement) { 74 | return ServerRegistry.getInstance().getServers().toArray(); 75 | } 76 | } 77 | -------------------------------------------------------------------------------- /src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/servers/IHadoopServerListener.java: -------------------------------------------------------------------------------- 1 | /** 2 | * Licensed to the Apache Software Foundation (ASF) under one 3 | * or more contributor license agreements. See the NOTICE file 4 | * distributed with this work for additional information 5 | * regarding copyright ownership. The ASF licenses this file 6 | * to you under the Apache License, Version 2.0 (the 7 | * "License"); you may not use this file except in compliance 8 | * with the License. 
You may obtain a copy of the License at 9 | * 10 | * http://www.apache.org/licenses/LICENSE-2.0 11 | * 12 | * Unless required by applicable law or agreed to in writing, software 13 | * distributed under the License is distributed on an "AS IS" BASIS, 14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 15 | * See the License for the specific language governing permissions and 16 | * limitations under the License. 17 | */ 18 | 19 | package org.apache.hadoop.eclipse.servers; 20 | 21 | import org.apache.hadoop.eclipse.server.HadoopServer; 22 | 23 | /** 24 | * Interface for monitoring server changes 25 | */ 26 | public interface IHadoopServerListener { 27 | void serverChanged(HadoopServer location, int type); 28 | } 29 | -------------------------------------------------------------------------------- /src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/servers/ServerRegistry.java: -------------------------------------------------------------------------------- 1 | /** 2 | * Licensed to the Apache Software Foundation (ASF) under one 3 | * or more contributor license agreements. See the NOTICE file 4 | * distributed with this work for additional information 5 | * regarding copyright ownership. The ASF licenses this file 6 | * to you under the Apache License, Version 2.0 (the 7 | * "License"); you may not use this file except in compliance 8 | * with the License. You may obtain a copy of the License at 9 | * 10 | * http://www.apache.org/licenses/LICENSE-2.0 11 | * 12 | * Unless required by applicable law or agreed to in writing, software 13 | * distributed under the License is distributed on an "AS IS" BASIS, 14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 15 | * See the License for the specific language governing permissions and 16 | * limitations under the License. 
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.hadoop.eclipse.servers;

import java.io.File;
import java.io.FilenameFilter;
import java.io.IOException;
import java.util.Collection;
import java.util.Collections;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import java.util.TreeMap;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.eclipse.Activator;
import org.apache.hadoop.eclipse.server.HadoopServer;
import org.eclipse.jface.dialogs.MessageDialog;

/**
 * Register of Hadoop locations.
 *
 * Each location corresponds to a Hadoop {@link Configuration} stored as an
 * XML file in the workspace plug-in configuration directory:
 * {@code <workspace-dir>/.metadata/.plugins/org.apache.hadoop.eclipse/locations/*.xml}
 */
public class ServerRegistry {

  private static final ServerRegistry INSTANCE = new ServerRegistry();

  /** Event type: a location was added to the registry. */
  public static final int SERVER_ADDED = 0;

  /** Event type: a location was removed from the registry. */
  public static final int SERVER_REMOVED = 1;

  /** Event type: the state of a registered location changed. */
  public static final int SERVER_STATE_CHANGED = 2;

  /** State directory of this plug-in in the current workspace. */
  private final File baseDir =
      Activator.getDefault().getStateLocation().toFile();

  /** Directory where the location definition files are persisted. */
  private final File saveDir = new File(baseDir, "locations");

  private ServerRegistry() {
    // A plain file at the save path would shadow the directory we need
    if (saveDir.exists() && !saveDir.isDirectory())
      saveDir.delete();
    if (!saveDir.exists())
      saveDir.mkdirs();

    load();
  }

  /** Registered locations, keyed (and sorted) by location name. */
  private Map<String, HadoopServer> servers;

  /** Listeners notified of location additions, removals and changes. */
  private final Set<IHadoopServerListener> listeners =
      new HashSet<IHadoopServerListener>();

  public static ServerRegistry getInstance() {
    return INSTANCE;
  }

  /**
   * @return an unmodifiable view of all registered Hadoop locations
   */
  public synchronized Collection<HadoopServer> getServers() {
    return Collections.unmodifiableCollection(servers.values());
  }

  /**
   * Load all available locations from the workspace configuration directory.
   * Files that cannot be parsed as a location are reported and skipped.
   */
  private synchronized void load() {
    Map<String, HadoopServer> map = new TreeMap<String, HadoopServer>();
    // listFiles() returns null on I/O error or if the directory vanished
    File[] files = saveDir.listFiles();
    if (files != null) {
      for (File file : files) {
        try {
          HadoopServer server = new HadoopServer(file);
          map.put(server.getLocationName(), server);

        } catch (Exception exn) {
          // Skip unreadable/corrupt location files but keep loading the rest
          System.err.println(exn);
        }
      }
    }
    this.servers = map;
  }

  /**
   * Persist all locations: write them into a fresh temporary directory,
   * then swap it in place of the current save directory, keeping the
   * previous contents as "locations.backup".
   */
  private synchronized void store() {
    try {
      // createTempFile() yields a unique path; reuse that name as a directory
      File dir = File.createTempFile("locations", "new", baseDir);
      dir.delete();
      dir.mkdirs();

      for (HadoopServer server : servers.values()) {
        server.storeSettingsToFile(new File(dir, server.getLocationName()
            + ".xml"));
      }

      FilenameFilter XMLFilter = new FilenameFilter() {
        public boolean accept(File dir, String name) {
          String lower = name.toLowerCase();
          return lower.endsWith(".xml");
        }
      };

      // Remove the previous backup so the current save dir can take its place
      File backup = new File(baseDir, "locations.backup");
      if (backup.exists()) {
        File[] backupFiles = backup.listFiles(XMLFilter);
        if (backupFiles != null)
          for (File file : backupFiles)
            if (!file.delete())
              throw new IOException("Unable to delete backup location file: "
                  + file);
        if (!backup.delete())
          throw new IOException(
              "Unable to delete backup location directory: " + backup);
      }

      // NOTE(review): renameTo() results are ignored; a failure here would
      // silently leave a stale save directory — consider checking the returns
      saveDir.renameTo(backup);
      dir.renameTo(saveDir);

    } catch (IOException ioe) {
      ioe.printStackTrace();
      MessageDialog.openError(null,
          "Saving configuration of Hadoop locations failed", ioe.toString());
    }
  }

  /** Release the resources held by every registered location. */
  public void dispose() {
    for (HadoopServer server : getServers()) {
      server.dispose();
    }
  }

  /**
   * @param location a location name
   * @return the registered location of that name, or null if unknown
   */
  public synchronized HadoopServer getServer(String location) {
    return servers.get(location);
  }

  /*
   * HadoopServer map listeners
   */

  public void addListener(IHadoopServerListener l) {
    synchronized (listeners) {
      listeners.add(l);
    }
  }

  public void removeListener(IHadoopServerListener l) {
    synchronized (listeners) {
      listeners.remove(l);
    }
  }

  /** Notify every registered listener of a change of the given kind. */
  private void fireListeners(HadoopServer location, int kind) {
    synchronized (listeners) {
      for (IHadoopServerListener listener : listeners) {
        listener.serverChanged(location, kind);
      }
    }
  }

  /** Remove a location, persist the registry and notify listeners. */
  public synchronized void removeServer(HadoopServer server) {
    this.servers.remove(server.getLocationName());
    store();
    fireListeners(server, SERVER_REMOVED);
  }

  /** Add a location, persist the registry and notify listeners. */
  public synchronized void addServer(HadoopServer server) {
    this.servers.put(server.getLocationName(), server);
    store();
    fireListeners(server, SERVER_ADDED);
  }

  /**
   * Update one Hadoop location
   *
   * @param originalName the original location name (might have changed)
   * @param server the location
   */
  public synchronized void updateServer(String originalName,
      HadoopServer server) {

    // Update the map if the location name has changed
    if (!server.getLocationName().equals(originalName)) {
      servers.remove(originalName);
      servers.put(server.getLocationName(), server);
    }
    store();
    fireListeners(server, SERVER_STATE_CHANGED);
  }
}
javax.servlet 56 | servlet-api 57 | 58 | 59 | javax.servlet.jsp 60 | jsp-api 61 | 62 | 63 | jetty 64 | org.mortbay.jetty 65 | 66 | 67 | org.mortbay.jetty 68 | jetty 69 | 70 | 71 | org.mortbay.jetty 72 | jetty-util 73 | 74 | 75 | org.mortbay.jetty 76 | jsp-2.1 77 | 78 | 79 | org.mortbay.jetty 80 | jsp-api-2.1 81 | 82 | 83 | org.mortbay.jetty 84 | servlet-api-2.5 85 | 86 | 87 | net.sf.kosmosfs 88 | kfs 89 | 90 | 91 | net.java.dev.jets3t 92 | jets3t 93 | 94 | 95 | org.eclipse.jdt 96 | core 97 | 98 | 99 | 100 | 101 | 102 | 103 | 104 | -------------------------------------------------------------------------------- /src/ivy/hadoop-core-pom-template.xml: -------------------------------------------------------------------------------- 1 | 2 | 18 | 20 | 4.0.0 21 | org.apache.hadoop 22 | hadoop-core 23 | jar 24 | @version 25 | 26 | 27 | commons-cli 28 | commons-cli 29 | 1.2 30 | 31 | 32 | xmlenc 33 | xmlenc 34 | 0.52 35 | 36 | 37 | com.sun.jersey 38 | jersey-core 39 | 1.8 40 | 41 | 42 | com.sun.jersey 43 | jersey-json 44 | 1.8 45 | 46 | 47 | com.sun.jersey 48 | jersey-server 49 | 1.8 50 | 51 | 52 | commons-io 53 | commons-io 54 | 2.1 55 | 56 | 57 | commons-httpclient 58 | commons-httpclient 59 | 3.0.1 60 | 61 | 62 | commons-codec 63 | commons-codec 64 | 1.4 65 | 66 | 67 | org.apache.commons 68 | commons-math 69 | 2.1 70 | 71 | 72 | commons-configuration 73 | commons-configuration 74 | 1.6 75 | 76 | 77 | commons-net 78 | commons-net 79 | 1.4.1 80 | 81 | 82 | org.mortbay.jetty 83 | jetty 84 | 6.1.26 85 | 86 | 87 | org.mortbay.jetty 88 | jetty-util 89 | 6.1.26 90 | 91 | 92 | tomcat 93 | jasper-runtime 94 | 5.5.12 95 | 96 | 97 | tomcat 98 | jasper-compiler 99 | 5.5.12 100 | 101 | 102 | org.mortbay.jetty 103 | jsp-api-2.1 104 | 6.1.14 105 | 106 | 107 | org.mortbay.jetty 108 | jsp-2.1 109 | 6.1.14 110 | 111 | 112 | commons-el 113 | commons-el 114 | 1.0 115 | 116 | 117 | net.java.dev.jets3t 118 | jets3t 119 | 0.6.1 120 | 121 | 122 | hsqldb 123 | hsqldb 124 | 1.8.0.10 125 | 126 | 
127 | oro 128 | oro 129 | 2.0.8 130 | 131 | 132 | org.eclipse.jdt 133 | core 134 | 3.1.1 135 | 136 | 137 | org.codehaus.jackson 138 | jackson-mapper-asl 139 | 1.8.8 140 | 141 | 142 | 143 | -------------------------------------------------------------------------------- /src/ivy/hadoop-examples-pom-template.xml: -------------------------------------------------------------------------------- 1 | 2 | 18 | 19 | 21 | 22 | 4.0.0 23 | org.apache.hadoop 24 | hadoop-examples 25 | jar 26 | @version 27 | 28 | 29 | org.apache.hadoop 30 | hadoop-core 31 | @version 32 | 33 | 34 | 35 | -------------------------------------------------------------------------------- /src/ivy/hadoop-minicluster-pom-template.xml: -------------------------------------------------------------------------------- 1 | 2 | 15 | 17 | 4.0.0 18 | 19 | org.apache 20 | apache 21 | 9 22 | 23 | org.apache.hadoop 24 | hadoop-minicluster 25 | @version 26 | jar 27 | 28 | Apache Hadoop Mini-Cluster 29 | Apache Hadoop Mini-Cluster 30 | 31 | 32 | 33 | org.apache.hadoop 34 | hadoop-core 35 | @version 36 | compile 37 | 38 | 39 | org.apache.hadoop 40 | hadoop-test 41 | @version 42 | compile 43 | 44 | 45 | com.sun.jersey 46 | jersey-server 47 | 1.0 48 | compile 49 | 50 | 51 | 52 | 53 | 54 | -------------------------------------------------------------------------------- /src/ivy/hadoop-streaming-pom-template.xml: -------------------------------------------------------------------------------- 1 | 2 | 18 | 19 | 21 | 22 | 4.0.0 23 | org.apache.hadoop 24 | hadoop-streaming 25 | jar 26 | @version 27 | 28 | 29 | org.apache.hadoop 30 | hadoop-core 31 | @version 32 | 33 | 34 | 35 | -------------------------------------------------------------------------------- /src/ivy/hadoop-test-pom-template.xml: -------------------------------------------------------------------------------- 1 | 2 | 18 | 19 | 21 | 4.0.0 22 | org.apache.hadoop 23 | hadoop-test 24 | jar 25 | @version 26 | 27 | 28 | org.apache.hadoop 29 | hadoop-core 30 | 
@version 31 | 32 | 33 | org.apache.ftpserver 34 | ftplet-api 35 | 1.0.0 36 | 37 | 38 | org.apache.mina 39 | mina-core 40 | 2.0.0-M5 41 | 42 | 43 | org.apache.ftpserver 44 | ftpserver-core 45 | 1.0.0 46 | 47 | 48 | org.apache.ftpserver 49 | ftpserver-deprecated 50 | 1.0.0-M2 51 | 52 | 53 | 54 | -------------------------------------------------------------------------------- /src/ivy/hadoop-tools-pom-template.xml: -------------------------------------------------------------------------------- 1 | 2 | 18 | 19 | 21 | 22 | 4.0.0 23 | org.apache.hadoop 24 | hadoop-tools 25 | jar 26 | @version 27 | 28 | 29 | org.apache.hadoop 30 | hadoop-core 31 | @version 32 | 33 | 34 | 35 | -------------------------------------------------------------------------------- /src/ivy/ivysettings.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 19 | 20 | 23 | 32 | 35 | 38 | 41 | 43 | 45 | 46 | 47 | 48 | 49 | 54 | 59 | 64 | 65 | 66 | 67 | 68 | 69 | 70 | 71 | 72 | 73 | 74 | 75 | 76 | 77 | 78 | 79 | 80 | 81 | 86 | 87 | 88 | 89 | 90 | 91 | -------------------------------------------------------------------------------- /src/ivy/libraries.properties: -------------------------------------------------------------------------------- 1 | # Licensed under the Apache License, Version 2.0 (the "License"); 2 | # you may not use this file except in compliance with the License. 3 | # You may obtain a copy of the License at 4 | # 5 | # http://www.apache.org/licenses/LICENSE-2.0 6 | # 7 | # Unless required by applicable law or agreed to in writing, software 8 | # distributed under the License is distributed on an "AS IS" BASIS, 9 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 10 | # See the License for the specific language governing permissions and 11 | # limitations under the License. 12 | 13 | #This properties file lists the versions of the various artifacts used by hadoop and components. 
14 | #It drives ivy and the generation of a maven POM 15 | 16 | # This is the version of hadoop we are generating 17 | hadoop.version=2.6.0 18 | hadoop-gpl-compression.version=0.1.0 19 | 20 | #These are the versions of our dependencies (in alphabetical order) 21 | apacheant.version=1.7.0 22 | ant-task.version=2.0.10 23 | 24 | asm.version=3.2 25 | aspectj.version=1.6.5 26 | aspectj.version=1.6.11 27 | 28 | checkstyle.version=4.2 29 | 30 | commons-cli.version=1.2 31 | commons-codec.version=1.4 32 | commons-collections.version=3.1 33 | commons-configuration.version=1.6 34 | commons-daemon.version=1.0.13 35 | commons-httpclient.version=3.0.1 36 | commons-lang.version=2.6 37 | commons-logging.version=1.0.4 38 | commons-logging-api.version=1.0.4 39 | commons-math.version=2.1 40 | commons-el.version=1.0 41 | commons-fileupload.version=1.2 42 | commons-io.version=2.1 43 | commons-net.version=3.1 44 | core.version=3.1.1 45 | coreplugin.version=1.3.2 46 | 47 | hsqldb.version=1.8.0.10 48 | 49 | ivy.version=2.1.0 50 | 51 | jasper.version=5.5.12 52 | jackson.version=1.8.8 53 | #not able to figure out the version of jsp & jsp-api version to get it resolved through ivy 54 | # but still declared here as we are going to have a local copy from the lib folder 55 | jsp.version=2.1 56 | jsp-api.version=5.5.12 57 | jsp-api-2.1.version=6.1.14 58 | jsp-2.1.version=6.1.14 59 | jets3t.version=0.6.1 60 | jetty.version=6.1.26 61 | jetty-util.version=6.1.26 62 | jersey-core.version=1.8 63 | jersey-json.version=1.8 64 | jersey-server.version=1.8 65 | junit.version=4.5 66 | jdeb.version=0.8 67 | jdiff.version=1.0.9 68 | json.version=1.0 69 | 70 | kfs.version=0.1 71 | 72 | log4j.version=1.2.15 73 | lucene-core.version=2.3.1 74 | 75 | mockito-all.version=1.8.5 76 | jsch.version=0.1.42 77 | 78 | oro.version=2.0.8 79 | 80 | rats-lib.version=0.5.1 81 | 82 | servlet.version=4.0.6 83 | servlet-api.version=2.5 84 | slf4j-api.version=1.4.3 85 | slf4j-log4j12.version=1.4.3 86 | 87 |
wagon-http.version=1.0-beta-2 88 | xmlenc.version=0.52 89 | xerces.version=1.4.4 90 | --------------------------------------------------------------------------------