/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.hadoop.mapred;

import java.io.IOException;
import java.io.DataInput;
import java.io.DataOutput;

import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.fs.Path;

/** A section of an input file.  Returned by {@link
 * InputFormat#getSplits(JobConf, int)} and passed to
 * {@link InputFormat#getRecordReader(InputSplit,JobConf,Reporter)}.
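 *
 * <p>A minimal construction sketch; the path, host names, and sizes below
 * are hypothetical examples, not part of this API:
 * <pre>{@code
 * Path file = new Path("/data/input/part-00000");
 * String[] hosts = { "node1.example.com", "node2.example.com" };
 * // split covering the first 128 MB of the file
 * FileSplit split = new FileSplit(file, 0L, 128L * 1024 * 1024, hosts);
 * long length = split.getLength();   // 134217728
 * }</pre>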
 * @deprecated Use {@link org.apache.hadoop.mapreduce.lib.input.FileSplit}
 *  instead.
 */
@Deprecated
@InterfaceAudience.Public
@InterfaceStability.Stable
public class FileSplit extends org.apache.hadoop.mapreduce.InputSplit
                       implements InputSplit {
  /** Delegate new-API split; every method of this class forwards to it. */
  org.apache.hadoop.mapreduce.lib.input.FileSplit fs;

  protected FileSplit() {
    fs = new org.apache.hadoop.mapreduce.lib.input.FileSplit();
  }

  /** Constructs a split.
   * @deprecated Use {@link #FileSplit(Path, long, long, String[])} instead.
   * @param file the file name
   * @param start the position of the first byte in the file to process
   * @param length the number of bytes in the file to process
   * @param conf the job configuration; unused
   */
  @Deprecated
  public FileSplit(Path file, long start, long length, JobConf conf) {
    this(file, start, length, (String[])null);
  }

  /** Constructs a split with host information.
   *
   * @param file the file name
   * @param start the position of the first byte in the file to process
   * @param length the number of bytes in the file to process
   * @param hosts the list of hosts containing the block, possibly null
   */
  public FileSplit(Path file, long start, long length, String[] hosts) {
    fs = new org.apache.hadoop.mapreduce.lib.input.FileSplit(file, start,
           length, hosts);
  }

  /** Constructs a split that wraps an existing new-API
   * {@link org.apache.hadoop.mapreduce.lib.input.FileSplit}.
   */
  public FileSplit(org.apache.hadoop.mapreduce.lib.input.FileSplit fs) {
    this.fs = fs;
  }

  /** The file containing this split's data. */
  public Path getPath() { return fs.getPath(); }

  /** The position of the first byte in the file to process. */
  public long getStart() { return fs.getStart(); }

  /** The number of bytes in the file to process. */
  public long getLength() { return fs.getLength(); }

  public String toString() { return fs.toString(); }

  ////////////////////////////////////////////
  // Writable methods
  ////////////////////////////////////////////

  public void write(DataOutput out) throws IOException {
    fs.write(out);
  }

  public void readFields(DataInput in) throws IOException {
    fs.readFields(in);
  }

  /** The list of hosts containing this split's data, possibly empty. */
  public String[] getLocations() throws IOException {
    return fs.getLocations();
  }
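
  /* A minimal round-trip sketch of the Writable methods above; illustrative
   * only. It assumes the caller is in this package (the no-arg constructor is
   * protected), uses the org.apache.hadoop.io buffer classes, and the path
   * and sizes are hypothetical:
   *
   *   FileSplit split =
   *       new FileSplit(new Path("/data/part-00000"), 0L, 64L, new String[0]);
   *   DataOutputBuffer out = new DataOutputBuffer();
   *   split.write(out);
   *
   *   DataInputBuffer in = new DataInputBuffer();
   *   in.reset(out.getData(), out.getLength());
   *   FileSplit copy = new FileSplit();
   *   copy.readFields(in);
   *   // copy.getPath(), copy.getStart(), and copy.getLength() now match split
   */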

}