|
||||||||||
PREV CLASS NEXT CLASS | FRAMES NO FRAMES | |||||||||
SUMMARY: NESTED | FIELD | CONSTR | METHOD | DETAIL: FIELD | CONSTR | METHOD |
java.lang.Object
  org.apache.hadoop.mapreduce.Mapper.Context
org.apache.hadoop.mapreduce.lib.map.WrappedMapper.Context
@InterfaceStability.Evolving public class WrappedMapper.Context
Field Summary | |
---|---|
protected MapContext<KEYIN,VALUEIN,KEYOUT,VALUEOUT> |
mapContext
|
Fields inherited from interface org.apache.hadoop.mapreduce.MRJobConfig |
---|
CACHE_ARCHIVES, CACHE_ARCHIVES_SIZES, CACHE_ARCHIVES_TIMESTAMPS, CACHE_ARCHIVES_VISIBILITIES, CACHE_FILE_TIMESTAMPS, CACHE_FILE_VISIBILITIES, CACHE_FILES, CACHE_FILES_SIZES, CACHE_LOCALARCHIVES, CACHE_LOCALFILES, CACHE_SYMLINK, CLASSPATH_ARCHIVES, CLASSPATH_FILES, COMBINE_CLASS_ATTR, COMPLETED_MAPS_FOR_REDUCE_SLOWSTART, END_NOTIFICATION_RETRIE_INTERVAL, END_NOTIFICATION_RETRIES, END_NOTIFICATION_URL, GROUP_COMPARATOR_CLASS, HISTORY_LOCATION, ID, INPUT_FORMAT_CLASS_ATTR, IO_SORT_FACTOR, IO_SORT_MB, JAR, JAR_UNPACK_PATTERN, JOB_ACL_MODIFY_JOB, JOB_ACL_VIEW_JOB, JOB_CANCEL_DELEGATION_TOKEN, JOB_JOBTRACKER_ID, JOB_LOCAL_DIR, JOB_NAME, JOB_NAMENODES, JVM_NUMTASKS_TORUN, KEY_COMPARATOR, MAP_CLASS_ATTR, MAP_COMBINE_MIN_SPILLS, MAP_DEBUG_SCRIPT, MAP_ENV, MAP_FAILURES_MAX_PERCENT, MAP_INPUT_FILE, MAP_INPUT_PATH, MAP_INPUT_START, MAP_JAVA_OPTS, MAP_LOG_LEVEL, MAP_MAX_ATTEMPTS, MAP_MEMORY_MB, MAP_MEMORY_PHYSICAL_MB, MAP_OUTPUT_COMPRESS, MAP_OUTPUT_COMPRESS_CODEC, MAP_OUTPUT_KEY_CLASS, MAP_OUTPUT_KEY_FIELD_SEPERATOR, MAP_OUTPUT_VALUE_CLASS, MAP_SKIP_INCR_PROC_COUNT, MAP_SKIP_MAX_RECORDS, MAP_SORT_SPILL_PERCENT, MAP_SPECULATIVE, MAP_ULIMIT, MAX_TASK_FAILURES_PER_TRACKER, NUM_MAP_PROFILES, NUM_MAPS, NUM_REDUCE_PROFILES, NUM_REDUCES, OUTPUT_FORMAT_CLASS_ATTR, OUTPUT_KEY_CLASS, OUTPUT_VALUE_CLASS, PARTITIONER_CLASS_ATTR, PRESERVE_FAILED_TASK_FILES, PRESERVE_FILES_PATTERN, PRIORITY, QUEUE_NAME, RECORDS_BEFORE_PROGRESS, REDUCE_CLASS_ATTR, REDUCE_DEBUG_SCRIPT, REDUCE_ENV, REDUCE_FAILURES_MAXPERCENT, REDUCE_INPUT_BUFFER_PERCENT, REDUCE_JAVA_OPTS, REDUCE_LOG_LEVEL, REDUCE_MARKRESET_BUFFER_PERCENT, REDUCE_MARKRESET_BUFFER_SIZE, REDUCE_MAX_ATTEMPTS, REDUCE_MEMORY_MB, REDUCE_MEMORY_PHYSICAL_MB, REDUCE_MEMORY_TOTAL_BYTES, REDUCE_MEMTOMEM_ENABLED, REDUCE_MEMTOMEM_THRESHOLD, REDUCE_MERGE_INMEM_THRESHOLD, REDUCE_SKIP_INCR_PROC_COUNT, REDUCE_SKIP_MAXGROUPS, REDUCE_SPECULATIVE, REDUCE_ULIMIT, SETUP_CLEANUP_NEEDED, SHUFFLE_CONNECT_TIMEOUT, SHUFFLE_FETCH_FAILURES, SHUFFLE_INPUT_BUFFER_PERCENT, 
SHUFFLE_MERGE_EPRCENT, SHUFFLE_NOTIFY_READERROR, SHUFFLE_PARALLEL_COPIES, SHUFFLE_READ_TIMEOUT, SKIP_OUTDIR, SKIP_RECORDS, SKIP_START_ATTEMPTS, SPECULATIVE_SLOWNODE_THRESHOLD, SPECULATIVE_SLOWTASK_THRESHOLD, SPECULATIVECAP, SPLIT_FILE, TASK_ATTEMPT_ID, TASK_DEBUGOUT_LINES, TASK_ID, TASK_ISMAP, TASK_OUTPUT_DIR, TASK_PARTITION, TASK_PROFILE, TASK_PROFILE_PARAMS, TASK_TEMP_DIR, TASK_TIMEOUT, TASK_USERLOG_LIMIT, USER_LOG_RETAIN_HOURS, USER_NAME, WORKING_DIR |
Constructor Summary | |
---|---|
WrappedMapper.Context(MapContext<KEYIN,VALUEIN,KEYOUT,VALUEOUT> mapContext)
|
Method Summary | |
---|---|
org.apache.hadoop.fs.Path[] |
getArchiveClassPaths()
Get the archive entries in classpath as an array of Path |
String[] |
getArchiveTimestamps()
Get the timestamps of the archives. |
URI[] |
getCacheArchives()
Get cache archives set in the Configuration |
URI[] |
getCacheFiles()
Get cache files set in the Configuration |
Class<? extends Reducer<?,?,?,?>> |
getCombinerClass()
Get the combiner class for the job. |
org.apache.hadoop.conf.Configuration |
getConfiguration()
Return the configuration for the job. |
Counter |
getCounter(Enum<?> counterName)
Get the Counter for the given counterName . |
Counter |
getCounter(String groupName,
String counterName)
Get the Counter for the given groupName and
counterName . |
KEYIN |
getCurrentKey()
Get the current key. |
VALUEIN |
getCurrentValue()
Get the current value. |
org.apache.hadoop.fs.Path[] |
getFileClassPaths()
Get the file entries in classpath as an array of Path |
String[] |
getFileTimestamps()
Get the timestamps of the files. |
org.apache.hadoop.io.RawComparator<?> |
getGroupingComparator()
Get the user defined RawComparator comparator for
grouping keys of inputs to the reduce. |
Class<? extends InputFormat<?,?>> |
getInputFormatClass()
Get the InputFormat class for the job. |
InputSplit |
getInputSplit()
Get the input split for this map. |
String |
getJar()
Get the pathname of the job's jar. |
JobID |
getJobID()
Get the unique ID for the job. |
String |
getJobName()
Get the user-specified job name. |
boolean |
getJobSetupCleanupNeeded()
Get whether job-setup and job-cleanup are needed for the job. |
org.apache.hadoop.fs.Path[] |
getLocalCacheArchives()
Return the path array of the localized caches |
org.apache.hadoop.fs.Path[] |
getLocalCacheFiles()
Return the path array of the localized files |
Class<?> |
getMapOutputKeyClass()
Get the key class for the map output data. |
Class<?> |
getMapOutputValueClass()
Get the value class for the map output data. |
Class<? extends Mapper<?,?,?,?>> |
getMapperClass()
Get the Mapper class for the job. |
int |
getMaxMapAttempts()
Get the configured number of maximum attempts that will be made to run a map task, as specified by the mapred.map.max.attempts
property. |
int |
getMaxReduceAttempts()
Get the configured number of maximum attempts that will be made to run a reduce task, as specified by the mapred.reduce.max.attempts
property. |
int |
getNumReduceTasks()
Get the configured number of reduce tasks for this job. |
OutputCommitter |
getOutputCommitter()
Get the OutputCommitter for the task-attempt. |
Class<? extends OutputFormat<?,?>> |
getOutputFormatClass()
Get the OutputFormat class for the job. |
Class<?> |
getOutputKeyClass()
Get the key class for the job output data. |
Class<?> |
getOutputValueClass()
Get the value class for job outputs. |
Class<? extends Partitioner<?,?>> |
getPartitionerClass()
Get the Partitioner class for the job. |
boolean |
getProfileEnabled()
Get whether the task profiling is enabled. |
String |
getProfileParams()
Get the profiler configuration arguments. |
org.apache.hadoop.conf.Configuration.IntegerRanges |
getProfileTaskRange(boolean isMap)
Get the range of maps or reduces to profile. |
Class<? extends Reducer<?,?,?,?>> |
getReducerClass()
Get the Reducer class for the job. |
org.apache.hadoop.io.RawComparator<?> |
getSortComparator()
Get the RawComparator comparator used to compare keys. |
String |
getStatus()
Get the last set status message. |
boolean |
getSymlink()
This method checks to see if symlinks are to be created for the localized cache files in the current working directory. |
TaskAttemptID |
getTaskAttemptID()
Get the unique name for this task attempt. |
String |
getUser()
Get the reported username for this job. |
org.apache.hadoop.fs.Path |
getWorkingDirectory()
Get the current working directory for the default file system. |
boolean |
nextKeyValue()
Advance to the next key, value pair, returning false if at end. |
void |
progress()
|
void |
setStatus(String msg)
Set the current status of the task to the given string. |
void |
write(KEYOUT key,
VALUEOUT value)
Generate an output key/value pair. |
Methods inherited from class java.lang.Object |
---|
clone, equals, finalize, getClass, hashCode, notify, notifyAll, toString, wait, wait, wait |
Field Detail |
---|
protected MapContext<KEYIN,VALUEIN,KEYOUT,VALUEOUT> mapContext
Constructor Detail |
---|
public WrappedMapper.Context(MapContext<KEYIN,VALUEIN,KEYOUT,VALUEOUT> mapContext)
Method Detail |
---|
public InputSplit getInputSplit()
public KEYIN getCurrentKey() throws IOException, InterruptedException
TaskInputOutputContext
IOException
InterruptedException
public VALUEIN getCurrentValue() throws IOException, InterruptedException
TaskInputOutputContext
IOException
InterruptedException
public boolean nextKeyValue() throws IOException, InterruptedException
TaskInputOutputContext
IOException
InterruptedException
public Counter getCounter(Enum<?> counterName)
TaskInputOutputContext
Counter
for the given counterName
.
counterName
- counter name
Counter
for the given counterName
public Counter getCounter(String groupName, String counterName)
TaskInputOutputContext
Counter
for the given groupName
and
counterName
.
counterName
- counter name
Counter
for the given groupName
and
counterName
public OutputCommitter getOutputCommitter()
TaskInputOutputContext
OutputCommitter
for the task-attempt.
OutputCommitter
for the task-attempt.
public void write(KEYOUT key, VALUEOUT value) throws IOException, InterruptedException
TaskInputOutputContext
IOException
InterruptedException
public String getStatus()
TaskAttemptContext
public TaskAttemptID getTaskAttemptID()
TaskAttemptContext
public void setStatus(String msg)
TaskAttemptContext
public org.apache.hadoop.fs.Path[] getArchiveClassPaths()
JobContext
public String[] getArchiveTimestamps()
JobContext
public URI[] getCacheArchives() throws IOException
JobContext
IOException
public URI[] getCacheFiles() throws IOException
JobContext
IOException
public Class<? extends Reducer<?,?,?,?>> getCombinerClass() throws ClassNotFoundException
JobContext
ClassNotFoundException
public org.apache.hadoop.conf.Configuration getConfiguration()
JobContext
public org.apache.hadoop.fs.Path[] getFileClassPaths()
JobContext
public String[] getFileTimestamps()
JobContext
public org.apache.hadoop.io.RawComparator<?> getGroupingComparator()
JobContext
RawComparator
comparator for
grouping keys of inputs to the reduce.
for details.
public Class<? extends InputFormat<?,?>> getInputFormatClass() throws ClassNotFoundException
JobContext
InputFormat
class for the job.
InputFormat
class for the job.
ClassNotFoundException
public String getJar()
JobContext
public JobID getJobID()
JobContext
public String getJobName()
JobContext
public boolean getJobSetupCleanupNeeded()
JobContext
public org.apache.hadoop.fs.Path[] getLocalCacheArchives() throws IOException
JobContext
IOException
public org.apache.hadoop.fs.Path[] getLocalCacheFiles() throws IOException
JobContext
IOException
public Class<?> getMapOutputKeyClass()
JobContext
public Class<?> getMapOutputValueClass()
JobContext
public Class<? extends Mapper<?,?,?,?>> getMapperClass() throws ClassNotFoundException
JobContext
Mapper
class for the job.
Mapper
class for the job.
ClassNotFoundException
public int getMaxMapAttempts()
JobContext
mapred.map.max.attempts
property. If this property is not already set, the default is 4 attempts.
public int getMaxReduceAttempts()
JobContext
mapred.reduce.max.attempts
property. If this property is not already set, the default is 4 attempts.
public int getNumReduceTasks()
JobContext
1
.
public Class<? extends OutputFormat<?,?>> getOutputFormatClass() throws ClassNotFoundException
JobContext
OutputFormat
class for the job.
OutputFormat
class for the job.
ClassNotFoundException
public Class<?> getOutputKeyClass()
JobContext
public Class<?> getOutputValueClass()
JobContext
public Class<? extends Partitioner<?,?>> getPartitionerClass() throws ClassNotFoundException
JobContext
Partitioner
class for the job.
Partitioner
class for the job.
ClassNotFoundException
public Class<? extends Reducer<?,?,?,?>> getReducerClass() throws ClassNotFoundException
JobContext
Reducer
class for the job.
Reducer
class for the job.
ClassNotFoundException
public org.apache.hadoop.io.RawComparator<?> getSortComparator()
JobContext
RawComparator
comparator used to compare keys.
RawComparator
comparator used to compare keys.
public boolean getSymlink()
JobContext
public org.apache.hadoop.fs.Path getWorkingDirectory() throws IOException
JobContext
IOException
public void progress()
public boolean getProfileEnabled()
JobContext
public String getProfileParams()
JobContext
public org.apache.hadoop.conf.Configuration.IntegerRanges getProfileTaskRange(boolean isMap)
JobContext
isMap
- is the task a map?
public String getUser()
JobContext
|
||||||||||
PREV CLASS NEXT CLASS | FRAMES NO FRAMES | |||||||||
SUMMARY: NESTED | FIELD | CONSTR | METHOD | DETAIL: FIELD | CONSTR | METHOD |