Example #1
        /// <summary>Read job outputs</summary>
        /// <exception cref="System.IO.IOException"/>
        internal static IList <TaskResult> ReadJobOutputs(FileSystem fs, Path outdir)
        {
            IList <TaskResult> results = new AList <TaskResult>();

            foreach (FileStatus status in fs.ListStatus(outdir))
            {
                if (status.GetPath().GetName().StartsWith("part-"))
                {
                    BufferedReader @in = new BufferedReader(new InputStreamReader(fs.Open(status.GetPath()), Charsets.Utf8));
                    try
                    {
                        for (string line; (line = @in.ReadLine()) != null;)
                        {
                            results.AddItem(TaskResult.ValueOf(line));
                        }
                    }
                    finally
                    {
                        @in.Close();
                    }
                }
            }
            if (results.IsEmpty())
            {
                throw new IOException("Output not found");
            }
            return(results);
        }
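A minimal call-site sketch for ReadJobOutputs, assuming the Sharpen-style Hadoop types used above (Configuration, FileSystem, Path). The output path is illustrative, and the Path.GetFileSystem(conf) lookup mirrors the pattern in Example #8 rather than anything shown in this snippet.

        /// <summary>Hypothetical caller: print every TaskResult found under a job output directory.</summary>
        internal static void PrintJobOutputs(Configuration conf)
        {
            Path outdir = new Path("/user/pi/out");       // illustrative path, not taken from the source
            FileSystem fs = outdir.GetFileSystem(conf);   // same lookup pattern as in Example #8
            foreach (TaskResult r in ReadJobOutputs(fs, outdir))
            {
                System.Console.Out.WriteLine(r);          // assumes TaskResult has a meaningful ToString()
            }
        }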
Example #2
 /// <summary>Close all DFSClient instances in the Cache.</summary>
 /// <param name="onlyAutomatic">only close those that are marked for automatic closing
 ///     </param>
 /// <exception cref="System.IO.IOException"/>
 internal virtual void CloseAll(bool onlyAutomatic)
 {
     lock (this)
     {
         IList <IOException> exceptions        = new AList <IOException>();
         ConcurrentMap <string, DFSClient> map = clientCache.AsMap();
         foreach (KeyValuePair <string, DFSClient> item in map)
         {
             DFSClient client = item.Value;
             if (client != null)
             {
                 try
                 {
                     client.Close();
                 }
                 catch (IOException ioe)
                 {
                     exceptions.AddItem(ioe);
                 }
             }
         }
         if (!exceptions.IsEmpty())
         {
             throw MultipleIOException.CreateIOException(exceptions);
         }
     }
 }
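The method above collects every IOException and only rethrows once all clients have been closed. A rough equivalent of that collect-then-rethrow idiom using only standard .NET types (System, System.Collections.Generic) and hypothetical names, for readers outside the Sharpen/Hadoop stack:

 /// <summary>Sketch: close everything first, then fail once with an AggregateException.</summary>
 internal static void CloseAllSketch(IEnumerable<IDisposable> clients)
 {
     List<Exception> exceptions = new List<Exception>();
     foreach (IDisposable client in clients)
     {
         try
         {
             client.Dispose();
         }
         catch (Exception e)
         {
             exceptions.Add(e);   // keep closing the remaining clients before failing
         }
     }
     if (exceptions.Count > 0)
     {
         throw new AggregateException(exceptions);
     }
 }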
Example #3
        /// <summary>
        /// Selects an operation from the known operation set, or returns null if none
        /// are available, by applying the weighting algorithms and then handing the
        /// weighted operations off to the selection object.
        /// </summary>
        /// <param name="elapsed">the currently elapsed time (milliseconds) of the running program
        ///     </param>
        /// <param name="duration">the maximum amount of milliseconds of the running program</param>
        /// <returns>operation or null if none left</returns>
        internal virtual Operation Select(int elapsed, int duration)
        {
            IList <OperationWeight> validOps = new AList <OperationWeight>(operations.Count);

            foreach (Constants.OperationType type in operations.Keys)
            {
                WeightSelector.OperationInfo opinfo = operations[type];
                if (opinfo == null || opinfo.amountLeft <= 0)
                {
                    continue;
                }
                WeightSelector.Weightable weighter = weights[opinfo.distribution];
                if (weighter != null)
                {
                    OperationWeight weightOp = new OperationWeight(opinfo.operation, weighter.Weight(
                                                                       elapsed, duration));
                    validOps.AddItem(weightOp);
                }
                else
                {
                    throw new RuntimeException("Unable to get weight for distribution " + opinfo.distribution
                                               );
                }
            }
            if (validOps.IsEmpty())
            {
                return(null);
            }
            return(GetSelector().Select(validOps));
        }
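A hypothetical driver loop for Select(): keep picking operations until none are left or the time budget is spent. Only Select() comes from the example above; Operation.Run() and the Stopwatch-based timing are illustrative assumptions.

        internal virtual void RunAll(int durationMs)
        {
            // Drive Select() until it reports that no weighted operations remain.
            System.Diagnostics.Stopwatch timer = System.Diagnostics.Stopwatch.StartNew();
            Operation op;
            while ((op = Select((int)timer.ElapsedMilliseconds, durationMs)) != null)
            {
                op.Run();   // hypothetical; the real Operation API is not shown in this example
            }
        }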
Example #4
        public virtual void TestParseChangedVolumes()
        {
            StartDFSCluster(1, 1);
            DataNode                dn           = cluster.GetDataNodes()[0];
            Configuration           conf         = dn.GetConf();
            string                  oldPaths     = conf.Get(DFSConfigKeys.DfsDatanodeDataDirKey);
            IList <StorageLocation> oldLocations = new AList <StorageLocation>();

            foreach (string path in oldPaths.Split(","))
            {
                oldLocations.AddItem(StorageLocation.Parse(path));
            }
            NUnit.Framework.Assert.IsFalse(oldLocations.IsEmpty());
            string newPaths = oldLocations[0].GetFile().GetAbsolutePath() + ",/foo/path1,/foo/path2";

            DataNode.ChangedVolumes changedVolumes = dn.ParseChangedVolumes(newPaths);
            IList <StorageLocation> newVolumes     = changedVolumes.newLocations;

            NUnit.Framework.Assert.AreEqual(2, newVolumes.Count);
            NUnit.Framework.Assert.AreEqual(new FilePath("/foo/path1").GetAbsolutePath(), newVolumes
                                            [0].GetFile().GetAbsolutePath());
            NUnit.Framework.Assert.AreEqual(new FilePath("/foo/path2").GetAbsolutePath(), newVolumes
                                            [1].GetFile().GetAbsolutePath());
            IList <StorageLocation> removedVolumes = changedVolumes.deactivateLocations;

            NUnit.Framework.Assert.AreEqual(1, removedVolumes.Count);
            NUnit.Framework.Assert.AreEqual(oldLocations[1].GetFile(), removedVolumes[0].GetFile
                                                ());
            NUnit.Framework.Assert.AreEqual(1, changedVolumes.unchangedLocations.Count);
            NUnit.Framework.Assert.AreEqual(oldLocations[0].GetFile(), changedVolumes.unchangedLocations
                                            [0].GetFile());
        }
Example #5
        /// <summary>Execute DistSum computations</summary>
        /// <exception cref="System.Exception"/>
        private void Execute(DistSum distsum, IDictionary <Bellard.Parameter, Bellard.Sum>
                             sums)
        {
            IList <DistSum.Computation> computations = new AList <DistSum.Computation>();
            int i = 0;

            foreach (Bellard.Parameter p in Bellard.Parameter.Values())
            {
                foreach (Summation s in sums[p])
                {
                    if (s.GetValue() == null)
                    {
                        computations.AddItem(new DistSum.Computation(this, i++, p.ToString(), s));
                    }
                }
            }
            if (computations.IsEmpty())
            {
                [email protected]("No computation");
            }
            else
            {
                timer.Tick("execute " + computations.Count + " computation(s)");
                Org.Apache.Hadoop.Examples.PI.Util.Execute(distsum.GetParameters().nThreads, computations
                                                           );
                timer.Tick("done");
            }
        }
Example #6
 /// <summary>
 /// Create a table that contains all directories under root and
 /// another table that contains all files under root.
 /// </summary>
 private int InitFileDirTables()
 {
     try
     {
         InitFileDirTables(root);
     }
     catch (IOException e)
     {
         System.Console.Error.WriteLine(e.GetLocalizedMessage());
         Runtime.PrintStackTrace(e);
         return(-1);
     }
     if (dirs.IsEmpty())
     {
         System.Console.Error.WriteLine("The test space " + root + " is empty");
         return(-1);
     }
     if (files.IsEmpty())
     {
         System.Console.Error.WriteLine("The test space " + root + " does not have any file"
                                        );
         return(-1);
     }
     return(0);
 }
Example #7
        private ObjectDirectory.PackList ScanPacksImpl(ObjectDirectory.PackList old)
        {
            IDictionary <string, PackFile> forReuse = ReuseMap(old);
            FileSnapshot         snapshot           = FileSnapshot.Save(packDirectory);
            ICollection <string> names = ListPackDirectory();
            IList <PackFile>     list  = new AList <PackFile>(names.Count >> 2);
            bool foundNew = false;

            foreach (string indexName in names)
            {
                // Must match "pack-[0-9a-f]{40}.idx" to be an index.
                //
                if (indexName.Length != 49 || !indexName.EndsWith(".idx"))
                {
                    continue;
                }
                string @base    = Sharpen.Runtime.Substring(indexName, 0, indexName.Length - 4);
                string packName = @base + ".pack";
                if (!names.Contains(packName))
                {
                    // Sometimes C Git's HTTP fetch transport leaves a
                    // .idx file behind and does not download the .pack.
                    // We have to skip over such useless indexes.
                    //
                    continue;
                }
                PackFile oldPack = Sharpen.Collections.Remove(forReuse, packName);
                if (oldPack != null)
                {
                    list.AddItem(oldPack);
                    continue;
                }
                FilePath packFile = new FilePath(packDirectory, packName);
                FilePath idxFile  = new FilePath(packDirectory, indexName);
                list.AddItem(new PackFile(idxFile, packFile));
                foundNew = true;
            }
            // If we did not discover any new files, the modification time was not
            // changed, and we did not remove any files, then the set of files is
            // the same as the set we were given. Instead of building a new object
            // return the same collection.
            //
            if (!foundNew && forReuse.IsEmpty() && snapshot.Equals(old.snapshot))
            {
                old.snapshot.SetClean(snapshot);
                return(old);
            }
            foreach (PackFile p in forReuse.Values)
            {
                p.Close();
            }
            if (list.IsEmpty())
            {
                return(new ObjectDirectory.PackList(snapshot, NO_PACKS.packs));
            }
            PackFile[] r = Sharpen.Collections.ToArray(list, new PackFile[list.Count]);
            Arrays.Sort(r, PackFile.SORT);
            return(new ObjectDirectory.PackList(snapshot, r));
        }
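The 49-character/".idx" test above is a quick stand-in for the pattern named in its comment. A hypothetical regex-based helper that states the same intent explicitly (standard System.Text.RegularExpressions; slightly stricter than the length/suffix check):

        private static readonly System.Text.RegularExpressions.Regex PackIndexName =
            new System.Text.RegularExpressions.Regex("^pack-[0-9a-f]{40}\\.idx$");

        /// <summary>Hypothetical helper matching the index-name pattern described above.</summary>
        private static bool IsPackIndexName(string name)
        {
            // "pack-" (5) + 40 hex digits + ".idx" (4) = 49 characters, which is what the
            // quick Length/EndsWith check in ScanPacksImpl approximates.
            return PackIndexName.IsMatch(name);
        }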
Example #8
        /// <exception cref="System.IO.IOException"/>
        private IList <FileStatus> SingleThreadedListStatus(JobContext job, Path[] dirs, PathFilter
                                                            inputFilter, bool recursive)
        {
            IList <FileStatus>  result = new AList <FileStatus>();
            IList <IOException> errors = new AList <IOException>();

            for (int i = 0; i < dirs.Length; ++i)
            {
                Path         p       = dirs[i];
                FileSystem   fs      = p.GetFileSystem(job.GetConfiguration());
                FileStatus[] matches = fs.GlobStatus(p, inputFilter);
                if (matches == null)
                {
                    errors.AddItem(new IOException("Input path does not exist: " + p));
                }
                else
                {
                    if (matches.Length == 0)
                    {
                        errors.AddItem(new IOException("Input Pattern " + p + " matches 0 files"));
                    }
                    else
                    {
                        foreach (FileStatus globStat in matches)
                        {
                            if (globStat.IsDirectory())
                            {
                                RemoteIterator <LocatedFileStatus> iter = fs.ListLocatedStatus(globStat.GetPath());
                                while (iter.HasNext())
                                {
                                    LocatedFileStatus stat = iter.Next();
                                    if (inputFilter.Accept(stat.GetPath()))
                                    {
                                        if (recursive && stat.IsDirectory())
                                        {
                                            AddInputPathRecursively(result, fs, stat.GetPath(), inputFilter);
                                        }
                                        else
                                        {
                                            result.AddItem(stat);
                                        }
                                    }
                                }
                            }
                            else
                            {
                                result.AddItem(globStat);
                            }
                        }
                    }
                }
            }
            if (!errors.IsEmpty())
            {
                throw new InvalidInputException(errors);
            }
            return(result);
        }
Example #9
        /// <exception cref="System.IO.IOException"></exception>
        private ObjectDirectory.CachedPackList ScanCachedPacks(ObjectDirectory.CachedPackList
                                                               old)
        {
            FileSnapshot s = FileSnapshot.Save(cachedPacksFile);

            byte[] buf;
            try
            {
                buf = IOUtil.ReadFully(cachedPacksFile);
            }
            catch (FileNotFoundException)
            {
                buf = new byte[0];
            }
            if (old != null && old.snapshot.Equals(s) && Arrays.Equals(old.raw, buf))
            {
                old.snapshot.SetClean(s);
                return(old);
            }
            AList <LocalCachedPack> list = new AList <LocalCachedPack>(4);
            ICollection <ObjectId>  tips = new HashSet <ObjectId>();
            int ptr = 0;

            while (ptr < buf.Length)
            {
                if (buf[ptr] == '#' || buf[ptr] == '\n')
                {
                    ptr = RawParseUtils.NextLF(buf, ptr);
                    continue;
                }
                if (buf[ptr] == '+')
                {
                    tips.AddItem(ObjectId.FromString(buf, ptr + 2));
                    ptr = RawParseUtils.NextLF(buf, ptr + 2);
                    continue;
                }
                IList <string> names = new AList <string>(4);
                while (ptr < buf.Length && buf[ptr] == 'P')
                {
                    int end = RawParseUtils.NextLF(buf, ptr);
                    if (buf[end - 1] == '\n')
                    {
                        end--;
                    }
                    names.AddItem(RawParseUtils.Decode(buf, ptr + 2, end));
                    ptr = RawParseUtils.NextLF(buf, end);
                }
                if (!tips.IsEmpty() && !names.IsEmpty())
                {
                    list.AddItem(new LocalCachedPack(this, tips, names));
                    tips = new HashSet <ObjectId>();
                }
            }
            list.TrimToSize();
            return(new ObjectDirectory.CachedPackList(s, Sharpen.Collections.UnmodifiableList
                                                          (list), buf));
        }
Example #10
        /// <summary>
        /// Test the case that remove a data volume on a particular DataNode when the
        /// volume is actively being written.
        /// </summary>
        /// <param name="dataNodeIdx">the index of the DataNode to remove a volume.</param>
        /// <exception cref="System.IO.IOException"/>
        /// <exception cref="Org.Apache.Hadoop.Conf.ReconfigurationException"/>
        /// <exception cref="Sharpen.TimeoutException"/>
        /// <exception cref="System.Exception"/>
        /// <exception cref="Sharpen.BrokenBarrierException"/>
        private void TestRemoveVolumeBeingWrittenForDatanode(int dataNodeIdx)
        {
            // Starts DFS cluster with 3 DataNodes to form a pipeline.
            StartDFSCluster(1, 3);
            short              Replication            = 3;
            DataNode           dn                     = cluster.GetDataNodes()[dataNodeIdx];
            FileSystem         fs                     = cluster.GetFileSystem();
            Path               testFile               = new Path("/test");
            long               lastTimeDiskErrorCheck = dn.GetLastDiskErrorCheck();
            FSDataOutputStream @out                   = fs.Create(testFile, Replication);
            Random             rb                     = new Random(0);

            byte[] writeBuf = new byte[BlockSize / 2];
            // half of the block.
            rb.NextBytes(writeBuf);
            @out.Write(writeBuf);
            @out.Hflush();
            // Make FsDatasetSpi#finalizeBlock a time-consuming operation. So if the
            // BlockReceiver releases volume reference before finalizeBlock(), the blocks
            // on the volume will be removed, and finalizeBlock() throws IOE.
            FsDatasetSpi <FsVolumeSpi> data = dn.data;

            dn.data = Org.Mockito.Mockito.Spy(data);
            Org.Mockito.Mockito.DoAnswer(new _Answer_599(data)).When(dn.data).FinalizeBlock(Matchers.Any<ExtendedBlock>());
            // Bypass the argument to FsDatasetImpl#finalizeBlock to verify that
            // the block is not removed, since the volume reference should not
            // be released at this point.
            CyclicBarrier  barrier = new CyclicBarrier(2);
            IList <string> oldDirs = GetDataDirs(dn);
            string         newDirs = oldDirs[1];
            // Remove the first volume.
            IList <Exception> exceptions = new AList <Exception>();

            Sharpen.Thread reconfigThread = new _Thread_616(barrier, dn, newDirs, exceptions);
            reconfigThread.Start();
            barrier.Await();
            rb.NextBytes(writeBuf);
            @out.Write(writeBuf);
            @out.Hflush();
            @out.Close();
            reconfigThread.Join();
            // Verify the file has sufficient replications.
            DFSTestUtil.WaitReplication(fs, testFile, Replication);
            // Read the content back
            byte[] content = DFSTestUtil.ReadFileBuffer(fs, testFile);
            NUnit.Framework.Assert.AreEqual(BlockSize, content.Length);
            // If an IOException thrown from BlockReceiver#run, it triggers
            // DataNode#checkDiskError(). So we can test whether checkDiskError() is called,
            // to see whether there is IOException in BlockReceiver#run().
            NUnit.Framework.Assert.AreEqual(lastTimeDiskErrorCheck, dn.GetLastDiskErrorCheck());
            if (!exceptions.IsEmpty())
            {
                throw new IOException(exceptions[0].InnerException);
            }
        }
Example #11
 /// <exception cref="NGit.Api.Errors.GitAPIException"></exception>
 public override ICollection <string> Call()
 {
     CheckCallable();
     try
     {
         SubmoduleWalk generator = SubmoduleWalk.ForIndex(repo);
         if (!paths.IsEmpty())
         {
             generator.SetFilter(PathFilterGroup.CreateFromStrings(paths));
         }
         StoredConfig   config      = repo.GetConfig();
         IList <string> initialized = new AList <string>();
         while (generator.Next())
         {
             // Ignore entry if URL is already present in config file
             if (generator.GetConfigUrl() != null)
             {
                 continue;
             }
             string path = generator.GetPath();
             // Copy 'url' and 'update' fields from .gitmodules to config
             // file
             string url    = generator.GetRemoteUrl();
             string update = generator.GetModulesUpdate();
             if (url != null)
             {
                 config.SetString(ConfigConstants.CONFIG_SUBMODULE_SECTION, path, ConfigConstants.
                                  CONFIG_KEY_URL, url);
             }
             if (update != null)
             {
                 config.SetString(ConfigConstants.CONFIG_SUBMODULE_SECTION, path, ConfigConstants.
                                  CONFIG_KEY_UPDATE, update);
             }
             if (url != null || update != null)
             {
                 initialized.AddItem(path);
             }
         }
         // Save repository config if any values were updated
         if (!initialized.IsEmpty())
         {
             config.Save();
         }
         return(initialized);
     }
     catch (IOException e)
     {
         throw new JGitInternalException(e.Message, e);
     }
     catch (ConfigInvalidException e)
     {
         throw new JGitInternalException(e.Message, e);
     }
 }
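A hedged usage sketch for this command: Git.Open(FilePath) and Git.SubmoduleInit() are assumed to mirror their JGit counterparts (Git.open / submoduleInit), and the repository path is illustrative.

 // Hypothetical call site: initialize any uninitialized submodules of an open repository.
 Git git = Git.Open(new FilePath("/path/to/repo"));
 ICollection<string> initialized = git.SubmoduleInit().Call();
 foreach (string path in initialized)
 {
     System.Console.Out.WriteLine("initialized submodule: " + path);
 }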
Example #12
        public virtual void ArchiveCompletedReservations(long tick)
        {
            // Since we are looking for old reservations, read lock is optimal
            Log.Debug("Running archival at time: {}", tick);
            IList <InMemoryReservationAllocation> expiredReservations = new AList <InMemoryReservationAllocation>();

            readLock.Lock();
            // archive reservations and delete the ones which are beyond
            // the reservation policy "window"
            try
            {
                long archivalTime = tick - policy.GetValidWindow();
                ReservationInterval searchInterval = new ReservationInterval(archivalTime, archivalTime);
                SortedDictionary <ReservationInterval, ICollection <InMemoryReservationAllocation> > reservations =
                    currentReservations.HeadMap(searchInterval, true);
                if (!reservations.IsEmpty())
                {
                    foreach (ICollection <InMemoryReservationAllocation> reservationEntries in reservations
                             .Values)
                    {
                        foreach (InMemoryReservationAllocation reservation in reservationEntries)
                        {
                            if (reservation.GetEndTime() <= archivalTime)
                            {
                                expiredReservations.AddItem(reservation);
                            }
                        }
                    }
                }
            }
            finally
            {
                readLock.Unlock();
            }
            if (expiredReservations.IsEmpty())
            {
                return;
            }
            // Need write lock only if there are any reservations to be deleted
            writeLock.Lock();
            try
            {
                foreach (InMemoryReservationAllocation expiredReservation in expiredReservations)
                {
                    RemoveReservation(expiredReservation);
                }
            }
            finally
            {
                writeLock.Unlock();
            }
        }
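The method above scans under the read lock and only takes the write lock when something must be deleted, so concurrent readers are not blocked during the scan. A minimal sketch of that two-phase pattern with the standard ReaderWriterLockSlim and hypothetical types (assumes using System.Collections.Generic and System.Threading):

        private readonly ReaderWriterLockSlim rwLock = new ReaderWriterLockSlim();

        public virtual void ArchiveSketch(IDictionary<long, string> reservations, long archivalTime)
        {
            IList<long> expired = new List<long>();
            rwLock.EnterReadLock();              // phase 1: find expired entries under the read lock
            try
            {
                foreach (KeyValuePair<long, string> entry in reservations)
                {
                    if (entry.Key <= archivalTime)
                    {
                        expired.Add(entry.Key);
                    }
                }
            }
            finally
            {
                rwLock.ExitReadLock();
            }
            if (expired.Count == 0)
            {
                return;                          // nothing to delete, never take the write lock
            }
            rwLock.EnterWriteLock();             // phase 2: delete under the write lock
            try
            {
                foreach (long key in expired)
                {
                    reservations.Remove(key);
                }
            }
            finally
            {
                rwLock.ExitWriteLock();
            }
        }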
Example #13
        public virtual void TestByteArrayManager()
        {
            int  countThreshold         = 32;
            int  countLimit             = 64;
            long countResetTimePeriodMs = 1000L;

            ByteArrayManager.Impl bam = new ByteArrayManager.Impl(new ByteArrayManager.Conf(countThreshold
                                                                                            , countLimit, countResetTimePeriodMs));
            ByteArrayManager.CounterMap counters = bam.GetCounters();
            ByteArrayManager.ManagerMap managers = bam.GetManagers();
            ExecutorService             pool     = Executors.NewFixedThreadPool(128);

            TestByteArrayManager.Runner[] runners = new TestByteArrayManager.Runner[TestByteArrayManager.Runner.NumRunners];
            Sharpen.Thread[] threads = new Sharpen.Thread[runners.Length];
            int num = 1 << 10;

            for (int i = 0; i < runners.Length; i++)
            {
                runners[i] = new TestByteArrayManager.Runner(i, countThreshold, countLimit, pool,
                                                             i, bam);
                threads[i] = runners[i].Start(num);
            }
            IList <Exception> exceptions = new AList <Exception>();

            Sharpen.Thread randomRecycler = new _Thread_332(runners, exceptions, threads);
            randomRecycler.Start();
            randomRecycler.Join();
            NUnit.Framework.Assert.IsTrue(exceptions.IsEmpty());
            NUnit.Framework.Assert.IsNull(counters.Get(0, false));
            for (int i_1 = 1; i_1 < runners.Length; i_1++)
            {
                if (!runners[i_1].assertionErrors.IsEmpty())
                {
                    foreach (Exception e in runners[i_1].assertionErrors)
                    {
                        Log.Error("AssertionError " + i_1, e);
                    }
                    NUnit.Framework.Assert.Fail(runners[i_1].assertionErrors.Count + " AssertionError(s)"
                                                );
                }
                int  arrayLength                      = TestByteArrayManager.Runner.Index2arrayLength(i_1);
                bool exceedCountThreshold             = counters.Get(arrayLength, false).GetCount() > countThreshold;
                ByteArrayManager.FixedLengthManager m = managers.Get(arrayLength, false);
                if (exceedCountThreshold)
                {
                    NUnit.Framework.Assert.IsNotNull(m);
                }
                else
                {
                    NUnit.Framework.Assert.IsNull(m);
                }
            }
        }
Example #14
        /// <summary>Update any smudged entries with information from the working tree.</summary>
        /// <remarks>Update any smudged entries with information from the working tree.</remarks>
        /// <exception cref="System.IO.IOException">System.IO.IOException</exception>
        private void UpdateSmudgedEntries()
        {
            TreeWalk       walk  = new TreeWalk(repository);
            IList <string> paths = new AList <string>(128);

            try
            {
                for (int i = 0; i < entryCnt; i++)
                {
                    if (sortedEntries[i].IsSmudged)
                    {
                        paths.AddItem(sortedEntries[i].PathString);
                    }
                }
                if (paths.IsEmpty())
                {
                    return;
                }
                walk.Filter = PathFilterGroup.CreateFromStrings(paths);
                DirCacheIterator iIter = new DirCacheIterator(this);
                FileTreeIterator fIter = new FileTreeIterator(repository);
                walk.AddTree(iIter);
                walk.AddTree(fIter);
                walk.Recursive = true;
                while (walk.Next())
                {
                    iIter = walk.GetTree <DirCacheIterator>(0);
                    if (iIter == null)
                    {
                        continue;
                    }
                    fIter = walk.GetTree <FileTreeIterator>(1);
                    if (fIter == null)
                    {
                        continue;
                    }
                    DirCacheEntry entry = iIter.GetDirCacheEntry();
                    if (entry.IsSmudged && iIter.IdEqual(fIter))
                    {
                        entry.SetLength(fIter.GetEntryLength());
                        entry.LastModified = fIter.GetEntryLastModified();
                    }
                }
            }
            finally
            {
                walk.Release();
            }
        }
Example #15
        /// <summary>Picks the replica node set from which a replica should be deleted when over-replicated.</summary>
        /// <remarks>
        /// Picks the replica node set from which a replica should be deleted when over-replicated.
        /// The first set contains replica nodes on racks with more than one replica,
        /// while the second set contains the remaining replica nodes.
        /// If the first set is not empty, it is divided into two subsets:
        /// moreThanOne contains nodes whose node group holds more than one replica, and
        /// exactlyOne contains the remaining nodes of the first set;
        /// moreThanOne is then picked if it is not empty, otherwise exactlyOne.
        /// If the first set is empty, the second set is picked.
        /// </remarks>
        protected internal override ICollection <DatanodeStorageInfo> PickupReplicaSet(
            ICollection <DatanodeStorageInfo> first, ICollection <DatanodeStorageInfo> second)
        {
            // If no replica within same rack, return directly.
            if (first.IsEmpty())
            {
                return(second);
            }
            // Split data nodes in the first set into two sets,
            // moreThanOne contains nodes on nodegroup with more than one replica
            // exactlyOne contains the remaining nodes
            IDictionary <string, IList <DatanodeStorageInfo> > nodeGroupMap =
                new Dictionary <string, IList <DatanodeStorageInfo> >();

            foreach (DatanodeStorageInfo storage in first)
            {
                string nodeGroupName = NetworkTopology.GetLastHalf(storage.GetDatanodeDescriptor().GetNetworkLocation());
                IList <DatanodeStorageInfo> storageList = nodeGroupMap[nodeGroupName];
                if (storageList == null)
                {
                    storageList = new AList <DatanodeStorageInfo>();
                    nodeGroupMap[nodeGroupName] = storageList;
                }
                storageList.AddItem(storage);
            }
            IList <DatanodeStorageInfo> moreThanOne = new AList <DatanodeStorageInfo>();
            IList <DatanodeStorageInfo> exactlyOne  = new AList <DatanodeStorageInfo>();

            // split nodes into two sets
            foreach (IList <DatanodeStorageInfo> datanodeList in nodeGroupMap.Values)
            {
                if (datanodeList.Count == 1)
                {
                    // exactlyOne contains nodes on nodegroup with exactly one replica
                    exactlyOne.AddItem(datanodeList[0]);
                }
                else
                {
                    // moreThanOne contains nodes on nodegroup with more than one replica
                    Sharpen.Collections.AddAll(moreThanOne, datanodeList);
                }
            }
            return(moreThanOne.IsEmpty() ? exactlyOne : moreThanOne);
        }
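The dictionary-building loop above groups storages by node group before splitting them into moreThanOne and exactlyOne. The same grouping step expressed with LINQ, purely as an illustration (assumes using System.Linq; the production code keeps the explicit dictionary):

            IEnumerable<IGrouping<string, DatanodeStorageInfo>> byNodeGroup =
                first.GroupBy(s => NetworkTopology.GetLastHalf(s.GetDatanodeDescriptor().GetNetworkLocation()));
            IList<DatanodeStorageInfo> moreThanOne = new AList<DatanodeStorageInfo>();
            IList<DatanodeStorageInfo> exactlyOne = new AList<DatanodeStorageInfo>();
            foreach (IGrouping<string, DatanodeStorageInfo> g in byNodeGroup)
            {
                if (g.Count() == 1)
                {
                    exactlyOne.AddItem(g.First());   // only one replica in this node group
                }
                else
                {
                    foreach (DatanodeStorageInfo storage in g)
                    {
                        moreThanOne.AddItem(storage);
                    }
                }
            }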
Example #16
        /// <summary>
        /// Set up the DistributedCache related configs to make
        /// <see cref="Org.Apache.Hadoop.Mapreduce.Filecache.DistributedCache.GetLocalCacheFiles(Org.Apache.Hadoop.Conf.Configuration)
        ///     "/>
        /// and
        /// <see cref="Org.Apache.Hadoop.Mapreduce.Filecache.DistributedCache.GetLocalCacheArchives(Org.Apache.Hadoop.Conf.Configuration)
        ///     "/>
        /// working.
        /// </summary>
        /// <param name="conf"/>
        /// <exception cref="System.IO.IOException"/>
        public static void SetupDistributedCacheLocal(Configuration conf)
        {
            string localWorkDir = Runtime.Getenv("PWD");

            //        ^ ^ all symlinks are created in the current work-dir
            // Update the configuration object with localized archives.
            URI[] cacheArchives = DistributedCache.GetCacheArchives(conf);
            if (cacheArchives != null)
            {
                IList <string> localArchives = new AList <string>();
                for (int i = 0; i < cacheArchives.Length; ++i)
                {
                    URI    u        = cacheArchives[i];
                    Path   p        = new Path(u);
                    Path   name     = new Path((null == u.GetFragment()) ? p.GetName() : u.GetFragment());
                    string linkName = name.ToUri().GetPath();
                    localArchives.AddItem(new Path(localWorkDir, linkName).ToUri().GetPath());
                }
                if (!localArchives.IsEmpty())
                {
                    conf.Set(MRJobConfig.CacheLocalarchives, StringUtils.ArrayToString(Sharpen.Collections.ToArray
                                                                                           (localArchives, new string[localArchives.Count])));
                }
            }
            // Update the configuration object with localized files.
            URI[] cacheFiles = DistributedCache.GetCacheFiles(conf);
            if (cacheFiles != null)
            {
                IList <string> localFiles = new AList <string>();
                for (int i = 0; i < cacheFiles.Length; ++i)
                {
                    URI    u        = cacheFiles[i];
                    Path   p        = new Path(u);
                    Path   name     = new Path((null == u.GetFragment()) ? p.GetName() : u.GetFragment());
                    string linkName = name.ToUri().GetPath();
                    localFiles.AddItem(new Path(localWorkDir, linkName).ToUri().GetPath());
                }
                if (!localFiles.IsEmpty())
                {
                    conf.Set(MRJobConfig.CacheLocalfiles, StringUtils.ArrayToString(Sharpen.Collections.ToArray
                                                                                        (localFiles, new string[localFiles.Count])));
                }
            }
        }
Example #17
        /// <summary>Chooses datanode locations for caching from a list of valid possibilities.
        ///     </summary>
        /// <remarks>
        /// Chooses datanode locations for caching from a list of valid possibilities.
        /// Non-stale nodes are chosen before stale nodes.
        /// </remarks>
        /// <param name="possibilities">List of candidate datanodes</param>
        /// <param name="neededCached">Number of replicas needed</param>
        /// <param name="staleInterval">Age of a stale datanode</param>
        /// <returns>A list of chosen datanodes</returns>
        private static IList <DatanodeDescriptor> ChooseDatanodesForCaching(
            IList <DatanodeDescriptor> possibilities, int neededCached, long staleInterval)
        {
            // Make a copy that we can modify
            IList <DatanodeDescriptor> targets = new AList <DatanodeDescriptor>(possibilities);
            // Selected targets
            IList <DatanodeDescriptor> chosen = new List <DatanodeDescriptor>();
            // Filter out stale datanodes
            IList <DatanodeDescriptor>       stale = new List <DatanodeDescriptor>();
            IEnumerator <DatanodeDescriptor> it    = targets.GetEnumerator();

            while (it.HasNext())
            {
                DatanodeDescriptor d = it.Next();
                if (d.IsStale(staleInterval))
                {
                    it.Remove();
                    stale.AddItem(d);
                }
            }
            // Select targets
            while (chosen.Count < neededCached)
            {
                // Try to use stale nodes if we're out of non-stale nodes, else we're done
                if (targets.IsEmpty())
                {
                    if (!stale.IsEmpty())
                    {
                        targets = stale;
                    }
                    else
                    {
                        break;
                    }
                }
                // Select a random target
                DatanodeDescriptor target = ChooseRandomDatanodeByRemainingCapacity(targets);
                chosen.AddItem(target);
                targets.Remove(target);
            }
            return(chosen);
        }
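ChooseRandomDatanodeByRemainingCapacity is not shown on this page. One plausible shape for such a helper, a capacity-weighted random pick, is sketched below purely as an illustration; it is not the Hadoop implementation, and GetRemaining() is assumed to expose a datanode's free capacity.

        /// <summary>Hypothetical sketch: pick one datanode at random, weighted by remaining capacity.</summary>
        private static DatanodeDescriptor ChooseByRemainingCapacitySketch(IList<DatanodeDescriptor> targets, Random random)
        {
            long total = 0;
            foreach (DatanodeDescriptor d in targets)
            {
                total += d.GetRemaining();                    // assumed accessor for free capacity
            }
            if (total <= 0)
            {
                return targets[random.Next(targets.Count)];   // degenerate case: uniform pick
            }
            long roll = (long)(random.NextDouble() * total);
            foreach (DatanodeDescriptor d in targets)
            {
                roll -= d.GetRemaining();
                if (roll < 0)
                {
                    return d;                                 // larger remaining capacity, larger bucket
                }
            }
            return targets[targets.Count - 1];                // guard against rounding at the upper edge
        }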
Example #18
        public static IList <string> GetProxyHostsAndPortsForAmFilter(Configuration conf)
        {
            IList <string> addrs     = new AList <string>();
            string         proxyAddr = conf.Get(YarnConfiguration.ProxyAddress);

            // If PROXY_ADDRESS isn't set, fallback to RM_WEBAPP(_HTTPS)_ADDRESS
            // There could be multiple if using RM HA
            if (proxyAddr == null || proxyAddr.IsEmpty())
            {
                // If RM HA is enabled, try getting those addresses
                if (HAUtil.IsHAEnabled(conf))
                {
                    IList <string> haAddrs = RMHAUtils.GetRMHAWebappAddresses(new YarnConfiguration(conf
                                                                                                    ));
                    foreach (string addr in haAddrs)
                    {
                        try
                        {
                            IPEndPoint socketAddr = NetUtils.CreateSocketAddr(addr);
                            addrs.AddItem(GetResolvedAddress(socketAddr));
                        }
                        catch (ArgumentException)
                        {
                        }
                    }
                }
                // skip if can't resolve
                // If couldn't resolve any of the addresses or not using RM HA, fallback
                if (addrs.IsEmpty())
                {
                    addrs.AddItem(GetResolvedRMWebAppURLWithoutScheme(conf));
                }
            }
            else
            {
                addrs.AddItem(proxyAddr);
            }
            return(addrs);
        }
Example #19
        /// <summary>
        /// Expand globs in the given <code>filePattern</code> into a collection of
        /// file patterns so that in the expanded set no file pattern has a
        /// slash character ("/") in a curly bracket pair.
        /// </summary>
        /// <param name="filePattern"/>
        /// <returns>expanded file patterns</returns>
        /// <exception cref="System.IO.IOException"></exception>
        public static IList <string> Expand(string filePattern)
        {
            IList <string> fullyExpanded = new AList <string>();
            IList <GlobExpander.StringWithOffset> toExpand = new AList <GlobExpander.StringWithOffset>();

            toExpand.AddItem(new GlobExpander.StringWithOffset(filePattern, 0));
            while (!toExpand.IsEmpty())
            {
                GlobExpander.StringWithOffset         path     = toExpand.Remove(0);
                IList <GlobExpander.StringWithOffset> expanded = ExpandLeftmost(path);
                if (expanded == null)
                {
                    fullyExpanded.AddItem(path.@string);
                }
                else
                {
                    toExpand.AddRange(0, expanded);
                }
            }
            return(fullyExpanded);
        }
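A small, hypothetical call site for Expand(); the pattern string is illustrative, and the printed output depends on the expansion rules described in the summary.

        public static void ExpandDemo()
        {
            // Expand a pattern whose curly-bracket group contains a slash, then print
            // each fully expanded pattern on its own line.
            foreach (string pattern in Expand("{/user/a,/user/b}/logs/*.txt"))
            {
                System.Console.Out.WriteLine(pattern);
            }
        }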
Example #20
        /// <exception cref="System.IO.IOException"></exception>
        private IDictionary <string, OpenSshConfig.Host> Parse(InputStream @in)
        {
            IDictionary <string, OpenSshConfig.Host> m = new LinkedHashMap <string, OpenSshConfig.Host>();
            BufferedReader             br      = new BufferedReader(new InputStreamReader(@in));
            IList <OpenSshConfig.Host> current = new AList <OpenSshConfig.Host>(4);
            string line;

            while ((line = br.ReadLine()) != null)
            {
                line = line.Trim();
                if (line.Length == 0 || line.StartsWith("#"))
                {
                    continue;
                }
                string[] parts    = line.Split("[ \t]*[= \t]", 2);
                string   keyword  = parts[0].Trim();
                string   argValue = parts[1].Trim();
                if (StringUtils.EqualsIgnoreCase("Host", keyword))
                {
                    current.Clear();
                    foreach (string pattern in argValue.Split("[ \t]"))
                    {
                        string             name = Dequote(pattern);
                        OpenSshConfig.Host c    = m.Get(name);
                        if (c == null)
                        {
                            c = new OpenSshConfig.Host();
                            m.Put(name, c);
                        }
                        current.AddItem(c);
                    }
                    continue;
                }
                if (current.IsEmpty())
                {
                    // We received an option outside of a Host block. We
                    // don't know who this should match against, so skip.
                    //
                    continue;
                }
                if (StringUtils.EqualsIgnoreCase("HostName", keyword))
                {
                    foreach (OpenSshConfig.Host c in current)
                    {
                        if (c.hostName == null)
                        {
                            c.hostName = Dequote(argValue);
                        }
                    }
                }
                else
                {
                    if (StringUtils.EqualsIgnoreCase("User", keyword))
                    {
                        foreach (OpenSshConfig.Host c in current)
                        {
                            if (c.user == null)
                            {
                                c.user = Dequote(argValue);
                            }
                        }
                    }
                    else
                    {
                        if (StringUtils.EqualsIgnoreCase("Port", keyword))
                        {
                            try
                            {
                                int port = System.Convert.ToInt32(Dequote(argValue));
                                foreach (OpenSshConfig.Host c in current)
                                {
                                    if (c.port == 0)
                                    {
                                        c.port = port;
                                    }
                                }
                            }
                            catch (FormatException)
                            {
                            }
                        }
                        else
                        {
                            // Bad port number. Don't set it.
                            if (StringUtils.EqualsIgnoreCase("IdentityFile", keyword))
                            {
                                foreach (OpenSshConfig.Host c in current)
                                {
                                    if (c.identityFile == null)
                                    {
                                        c.identityFile = ToFile(Dequote(argValue));
                                    }
                                }
                            }
                            else
                            {
                                if (StringUtils.EqualsIgnoreCase("PreferredAuthentications", keyword))
                                {
                                    foreach (OpenSshConfig.Host c in current)
                                    {
                                        if (c.preferredAuthentications == null)
                                        {
                                            c.preferredAuthentications = Nows(Dequote(argValue));
                                        }
                                    }
                                }
                                else
                                {
                                    if (StringUtils.EqualsIgnoreCase("BatchMode", keyword))
                                    {
                                        foreach (OpenSshConfig.Host c in current)
                                        {
                                            if (c.batchMode == null)
                                            {
                                                c.batchMode = Yesno(Dequote(argValue));
                                            }
                                        }
                                    }
                                    else
                                    {
                                        if (StringUtils.EqualsIgnoreCase("StrictHostKeyChecking", keyword))
                                        {
                                            string value = Dequote(argValue);
                                            foreach (OpenSshConfig.Host c in current)
                                            {
                                                if (c.strictHostKeyChecking == null)
                                                {
                                                    c.strictHostKeyChecking = value;
                                                }
                                            }
                                        }
                                    }
                                }
                            }
                        }
                    }
                }
            }
            return(m);
        }
Example #21
        /// <summary>Main entry point.</summary>
        /// <param name="args">command-line arguments</param>
        public static void Main(string[] args)
        {
            if (args.Length < 1 || args[0].Equals("-h") || args[0].Equals("--help"))
            {
                System.Console.Out.WriteLine(usage);
                return;
            }
            // Copy args, because CommandFormat mutates the list.
            IList <string> argsList = new AList <string>(Arrays.AsList(args));
            CommandFormat  cf       = new CommandFormat(0, int.MaxValue, "-glob", "-jar");

            try
            {
                cf.Parse(argsList);
            }
            catch (CommandFormat.UnknownOptionException)
            {
                Terminate(1, "unrecognized option");
                return;
            }
            string classPath = Runtime.GetProperty("java.class.path");

            if (cf.GetOpt("-glob"))
            {
                // The classpath returned from the property has been globbed already.
                System.Console.Out.WriteLine(classPath);
            }
            else
            {
                if (cf.GetOpt("-jar"))
                {
                    if (argsList.IsEmpty() || argsList[0] == null || argsList[0].IsEmpty())
                    {
                        Terminate(1, "-jar option requires path of jar file to write");
                        return;
                    }
                    // Write the classpath into the manifest of a temporary jar file.
                    Path   workingDir = new Path(Runtime.GetProperty("user.dir"));
                    string tmpJarPath;
                    try
                    {
                        tmpJarPath = FileUtil.CreateJarWithClassPath(classPath, workingDir, Runtime.GetEnv())[0];
                    }
                    catch (IOException e)
                    {
                        Terminate(1, "I/O error creating jar: " + e.Message);
                        return;
                    }
                    // Rename the temporary file to its final location.
                    string jarPath = argsList[0];
                    try
                    {
                        FileUtil.ReplaceFile(new FilePath(tmpJarPath), new FilePath(jarPath));
                    }
                    catch (IOException e)
                    {
                        Terminate(1, "I/O error renaming jar temporary file to path: " + e.Message);
                        return;
                    }
                }
            }
        }
Example #22
        /// <summary>
        /// Executes the
        /// <code>LsRemote</code>
        /// command with all the options and parameters
        /// collected by the setter methods (e.g.
        /// <see cref="SetHeads(bool)">SetHeads(bool)</see>
        /// ) of this
        /// class. Each instance of this class should only be used for one invocation
        /// of the command. Don't call this method twice on an instance.
        /// </summary>
        /// <returns>a collection of references in the remote repository</returns>
        /// <exception cref="NGit.Api.Errors.InvalidRemoteException">when called with an invalid remote uri
        ///     </exception>
        /// <exception cref="NGit.Api.Errors.TransportException">for errors that occurs during transport
        ///     </exception>
        /// <exception cref="NGit.Api.Errors.GitAPIException"></exception>
        public override ICollection <Ref> Call()
        {
            CheckCallable();
            NGit.Transport.Transport transport = null;
            FetchConnection          fc        = null;

            try
            {
                transport = NGit.Transport.Transport.Open(repo, remote);
                transport.SetOptionUploadPack(uploadPack);
                Configure(transport);
                ICollection <RefSpec> refSpecs = new AList <RefSpec>(1);
                if (tags)
                {
                    refSpecs.AddItem(new RefSpec("refs/tags/*:refs/remotes/origin/tags/*"));
                }
                if (heads)
                {
                    refSpecs.AddItem(new RefSpec("refs/heads/*:refs/remotes/origin/*"));
                }
                ICollection <Ref>         refs;
                IDictionary <string, Ref> refmap = new Dictionary <string, Ref>();
                fc   = transport.OpenFetch();
                refs = fc.GetRefs();
                if (refSpecs.IsEmpty())
                {
                    foreach (Ref r in refs)
                    {
                        refmap.Put(r.GetName(), r);
                    }
                }
                else
                {
                    foreach (Ref r_1 in refs)
                    {
                        foreach (RefSpec rs in refSpecs)
                        {
                            if (rs.MatchSource(r_1))
                            {
                                refmap.Put(r_1.GetName(), r_1);
                                break;
                            }
                        }
                    }
                }
                return(refmap.Values);
            }
            catch (URISyntaxException)
            {
                throw new InvalidRemoteException(MessageFormat.Format(JGitText.Get().invalidRemote
                                                                      , remote));
            }
            catch (NGit.Errors.NotSupportedException e)
            {
                throw new JGitInternalException(JGitText.Get().exceptionCaughtDuringExecutionOfLsRemoteCommand
                                                , e);
            }
            catch (NGit.Errors.TransportException e)
            {
                throw new NGit.Errors.TransportException(e.Message, e);
            }
            finally
            {
                if (fc != null)
                {
                    fc.Close();
                }
                if (transport != null)
                {
                    transport.Close();
                }
            }
        }
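A hedged usage sketch: list the head refs of a remote through the NGit Git facade. Git.Open(), LsRemote() and SetRemote() are assumed to mirror their JGit counterparts; only SetHeads() is documented in the snippet itself, and the remote name and repository path are illustrative.

        // Hypothetical call site for the command above.
        Git git = Git.Open(new FilePath("/path/to/repo"));
        ICollection<Ref> heads = git.LsRemote().SetRemote("origin").SetHeads(true).Call();
        foreach (Ref r in heads)
        {
            System.Console.Out.WriteLine(r.GetName());
        }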
Example #23
        /// <exception cref="NGit.Api.Errors.RefNotFoundException"></exception>
        /// <exception cref="System.IO.IOException"></exception>
        /// <exception cref="NGit.Api.Errors.NoHeadException"></exception>
        /// <exception cref="NGit.Api.Errors.JGitInternalException"></exception>
        private RebaseResult InitFilesAndRewind()
        {
            // we need to store everything into files so that we can implement
            // --skip, --continue, and --abort
            // first of all, we determine the commits to be applied
            IList <RevCommit> cherryPickList = new AList <RevCommit>();
            Ref head = repo.GetRef(Constants.HEAD);

            if (head == null || head.GetObjectId() == null)
            {
                throw new RefNotFoundException(MessageFormat.Format(JGitText.Get().refNotResolved
                                                                    , Constants.HEAD));
            }
            string headName;

            if (head.IsSymbolic())
            {
                headName = head.GetTarget().GetName();
            }
            else
            {
                headName = "detached HEAD";
            }
            ObjectId headId = head.GetObjectId();

            if (headId == null)
            {
                throw new RefNotFoundException(MessageFormat.Format(JGitText.Get().refNotResolved
                                                                    , Constants.HEAD));
            }
            RevCommit headCommit = walk.LookupCommit(headId);

            monitor.BeginTask(JGitText.Get().obtainingCommitsForCherryPick, ProgressMonitor.UNKNOWN
                              );
            LogCommand           cmd          = new Git(repo).Log().AddRange(upstreamCommit, headCommit);
            Iterable <RevCommit> commitsToUse = cmd.Call();

            foreach (RevCommit commit in commitsToUse)
            {
                cherryPickList.AddItem(commit);
            }
            // if the upstream commit is in a direct line to the current head,
            // the log command will not report any commits; in this case,
            // we create the cherry-pick list ourselves
            if (cherryPickList.IsEmpty())
            {
                Iterable <RevCommit> parents = new Git(repo).Log().Add(upstreamCommit).Call();
                foreach (RevCommit parent in parents)
                {
                    if (parent.Equals(headCommit))
                    {
                        break;
                    }
                    if (parent.ParentCount != 1)
                    {
                        throw new JGitInternalException(JGitText.Get().canOnlyCherryPickCommitsWithOneParent
                                                        );
                    }
                    cherryPickList.AddItem(parent);
                }
            }
            // nothing to do: return with UP_TO_DATE_RESULT
            if (cherryPickList.IsEmpty())
            {
                return(RebaseResult.UP_TO_DATE_RESULT);
            }
            Sharpen.Collections.Reverse(cherryPickList);
            // create the folder for the meta information
            FileUtils.Mkdir(rebaseDir);
            CreateFile(repo.Directory, Constants.ORIG_HEAD, headId.Name);
            CreateFile(rebaseDir, REBASE_HEAD, headId.Name);
            CreateFile(rebaseDir, HEAD_NAME, headName);
            CreateFile(rebaseDir, ONTO, upstreamCommit.Name);
            CreateFile(rebaseDir, INTERACTIVE, string.Empty);
            BufferedWriter fw = new BufferedWriter(new OutputStreamWriter(new FileOutputStream
                                                                              (new FilePath(rebaseDir, GIT_REBASE_TODO)), Constants.CHARACTER_ENCODING));

            fw.Write("# Created by EGit: rebasing " + upstreamCommit.Name + " onto " + headId
                     .Name);
            fw.NewLine();
            try
            {
                StringBuilder sb     = new StringBuilder();
                ObjectReader  reader = walk.GetObjectReader();
                foreach (RevCommit commit_1 in cherryPickList)
                {
                    sb.Length = 0;
                    sb.Append(RebaseCommand.Action.PICK.ToToken());
                    sb.Append(" ");
                    sb.Append(reader.Abbreviate(commit_1).Name);
                    sb.Append(" ");
                    sb.Append(commit_1.GetShortMessage());
                    fw.Write(sb.ToString());
                    fw.NewLine();
                }
            }
            finally
            {
                fw.Close();
            }
            monitor.EndTask();
            // we rewind to the upstream commit
            monitor.BeginTask(MessageFormat.Format(JGitText.Get().rewinding, upstreamCommit.GetShortMessage
                                                       ()), ProgressMonitor.UNKNOWN);
            CheckoutCommit(upstreamCommit);
            monitor.EndTask();
            return(null);
        }
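InitFilesAndRewind is an internal step of NGit's rebase support: it records ORIG_HEAD, the rebase metadata files and the git-rebase-todo list, then checks out the upstream commit. Application code normally reaches it through the porcelain API. The sketch below is a minimal, hypothetical usage example; it assumes NGit mirrors JGit's fluent RebaseCommand API (Git.Open, Rebase(), SetUpstream, Call, RebaseResult.GetStatus), so treat those member names as assumptions rather than verified signatures.

using NGit;
using NGit.Api;
using Sharpen;

public static class RebaseExample
{
    // Rebases the current branch onto the given upstream ref and reports the outcome.
    // Assumes the JGit-style porcelain API surfaced by NGit; names are not verified here.
    public static void RebaseOnto(string repoDir, string upstreamRef)
    {
        Git git = Git.Open(new FilePath(repoDir));
        try
        {
            RebaseResult result = git.Rebase().SetUpstream(upstreamRef).Call();
            System.Console.WriteLine("Rebase finished with status: " + result.GetStatus());
        }
        finally
        {
            git.GetRepository().Close();
        }
    }
}

In this flow, Call() is what eventually drives InitFilesAndRewind before the individual commits are cherry-picked onto the upstream.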
Example No. 25
0
		public IDictionary<string, object> GetAllDocs(QueryOptions options)
		{
			IDictionary<string, object> result = new Dictionary<string, object>();
			IList<QueryRow> rows = new AList<QueryRow>();
			if (options == null)
			{
				options = new QueryOptions();
			}
			bool includeDeletedDocs = (options.GetAllDocsMode() == Query.AllDocsMode.IncludeDeleted
				);
			long updateSeq = 0;
			if (options.IsUpdateSeq())
			{
				updateSeq = GetLastSequenceNumber();
			}
			// TODO: needs to be atomic with the following SELECT
			StringBuilder sql = new StringBuilder("SELECT revs.doc_id, docid, revid, sequence"
				);
			if (options.IsIncludeDocs())
			{
				sql.Append(", json");
			}
			if (includeDeletedDocs)
			{
				sql.Append(", deleted");
			}
			sql.Append(" FROM revs, docs WHERE");
			if (options.GetKeys() != null)
			{
				if (options.GetKeys().Count == 0)
				{
					return result;
				}
				string commaSeparatedIds = JoinQuotedObjects(options.GetKeys());
				sql.Append(string.Format(" revs.doc_id IN (SELECT doc_id FROM docs WHERE docid IN ({0})) AND"
					, commaSeparatedIds));
			}
			sql.Append(" docs.doc_id = revs.doc_id AND current=1");
			if (!includeDeletedDocs)
			{
				sql.Append(" AND deleted=0");
			}
			IList<string> args = new AList<string>();
			object minKey = options.GetStartKey();
			object maxKey = options.GetEndKey();
			bool inclusiveMin = true;
			bool inclusiveMax = options.IsInclusiveEnd();
			if (options.IsDescending())
			{
				minKey = maxKey;
				maxKey = options.GetStartKey();
				inclusiveMin = inclusiveMax;
				inclusiveMax = true;
			}
			if (minKey != null)
			{
				System.Diagnostics.Debug.Assert((minKey is string));
				sql.Append((inclusiveMin ? " AND docid >= ?" : " AND docid > ?"));
				args.AddItem((string)minKey);
			}
			if (maxKey != null)
			{
				System.Diagnostics.Debug.Assert((maxKey is string));
				sql.Append((inclusiveMax ? " AND docid <= ?" : " AND docid < ?"));
				args.AddItem((string)maxKey);
			}
			sql.Append(string.Format(" ORDER BY docid %s, %s revid DESC LIMIT ? OFFSET ?", (options
				.IsDescending() ? "DESC" : "ASC"), (includeDeletedDocs ? "deleted ASC," : string.Empty
				)));
			args.AddItem(Sharpen.Extensions.ToString(options.GetLimit()));
			args.AddItem(Sharpen.Extensions.ToString(options.GetSkip()));
			Cursor cursor = null;
			IDictionary<string, QueryRow> docs = new Dictionary<string, QueryRow>();
			try
			{
				cursor = database.RawQuery(sql.ToString(), Sharpen.Collections.ToArray(args, new 
					string[args.Count]));
				bool keepGoing = cursor.MoveToNext();
				while (keepGoing)
				{
					long docNumericID = cursor.GetLong(0);
					string docId = cursor.GetString(1);
					string revId = cursor.GetString(2);
					long sequenceNumber = cursor.GetLong(3);
					bool deleted = includeDeletedDocs && cursor.GetInt(GetDeletedColumnIndex(options)
						) > 0;
					IDictionary<string, object> docContents = null;
					if (options.IsIncludeDocs())
					{
						byte[] json = cursor.GetBlob(4);
						docContents = DocumentPropertiesFromJSON(json, docId, revId, deleted, sequenceNumber
							, options.GetContentOptions());
					}
					// Iterate over following rows with the same doc_id -- these are conflicts.
					// Skip them, but collect their revIDs if the 'conflicts' option is set:
					IList<string> conflicts = new AList<string>();
					while (((keepGoing = cursor.MoveToNext()) == true) && cursor.GetLong(0) == docNumericID
						)
					{
						if (options.GetAllDocsMode() == Query.AllDocsMode.ShowConflicts || options.GetAllDocsMode
							() == Query.AllDocsMode.OnlyConflicts)
						{
							if (conflicts.IsEmpty())
							{
								conflicts.AddItem(revId);
							}
							conflicts.AddItem(cursor.GetString(2));
						}
					}
					if (options.GetAllDocsMode() == Query.AllDocsMode.OnlyConflicts && conflicts.IsEmpty
						())
					{
						continue;
					}
					IDictionary<string, object> value = new Dictionary<string, object>();
					value.Put("rev", revId);
					value.Put("_conflicts", conflicts);
					if (includeDeletedDocs)
					{
						value.Put("deleted", (deleted ? true : null));
					}
					QueryRow change = new QueryRow(docId, sequenceNumber, docId, value, docContents);
					change.SetDatabase(this);
					if (options.GetKeys() != null)
					{
						docs.Put(docId, change);
					}
					else
					{
						rows.AddItem(change);
					}
				}
				if (options.GetKeys() != null)
				{
					foreach (object docIdObject in options.GetKeys())
					{
						if (docIdObject is string)
						{
							string docId = (string)docIdObject;
							QueryRow change = docs.Get(docId);
							if (change == null)
							{
								IDictionary<string, object> value = new Dictionary<string, object>();
								long docNumericID = GetDocNumericID(docId);
								if (docNumericID > 0)
								{
									bool deleted = false;
									IList<bool> outIsDeleted = new AList<bool>();
									IList<bool> outIsConflict = new AList<bool>();
									string revId = WinningRevIDOfDoc(docNumericID, outIsDeleted, outIsConflict);
									if (outIsDeleted.Count > 0)
									{
										deleted = true;
									}
									if (revId != null && deleted)
									{
										value.Put("rev", revId);
										value.Put("deleted", true);
									}
								}
								change = new QueryRow((value != null ? docId : null), 0, docId, value, null);
								change.SetDatabase(this);
							}
							rows.AddItem(change);
						}
					}
				}
			}
			catch (SQLException e)
			{
				Log.E(Database.Tag, "Error getting all docs", e);
				throw new CouchbaseLiteException("Error getting all docs", e, new Status(Status.InternalServerError
					));
			}
			finally
			{
				if (cursor != null)
				{
					cursor.Close();
				}
			}
			result.Put("rows", rows);
			result.Put("total_rows", rows.Count);
			result.Put("offset", options.GetSkip());
			if (updateSeq != 0)
			{
				result.Put("update_seq", updateSeq);
			}
			return result;
		}
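GetAllDocs builds the SQL for the _all_docs view from the supplied QueryOptions and returns a dictionary with "rows", "total_rows", "offset" and, optionally, "update_seq". A minimal caller-side sketch follows; it assumes QueryOptions exposes setters matching the getters used above (SetLimit, SetSkip, SetIncludeDocs) and that QueryRow has a GetDocumentId accessor. Those names, and the Couchbase.Lite namespace, are assumptions about this sharpened port rather than a documented contract.

using System.Collections.Generic;
using Couchbase.Lite;   // namespace assumed for this sharpened port

public static class AllDocsExample
{
    // Fetches the first page of documents, including their JSON bodies.
    // Setter and accessor names mirror the getters used by GetAllDocs and are assumptions.
    public static void PrintFirstPage(Database db)
    {
        QueryOptions options = new QueryOptions();
        options.SetLimit(20);          // page size
        options.SetSkip(0);            // start at the beginning
        options.SetIncludeDocs(true);  // pull the "json" column as well

        IDictionary<string, object> result = db.GetAllDocs(options);
        System.Console.WriteLine("total_rows = " + result["total_rows"]);
        foreach (QueryRow row in (IList<QueryRow>)result["rows"])
        {
            System.Console.WriteLine(row.GetDocumentId());
        }
    }
}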
Example No. 26
0
		/// <exception cref="System.IO.IOException"></exception>
		private ObjectDirectory.CachedPackList ScanCachedPacks(ObjectDirectory.CachedPackList
			 old)
		{
			FileSnapshot s = FileSnapshot.Save(cachedPacksFile);
			byte[] buf;
			try
			{
				buf = IOUtil.ReadFully(cachedPacksFile);
			}
			catch (FileNotFoundException)
			{
				buf = new byte[0];
			}
			if (old != null && old.snapshot.Equals(s) && Arrays.Equals(old.raw, buf))
			{
				old.snapshot.SetClean(s);
				return old;
			}
			AList<LocalCachedPack> list = new AList<LocalCachedPack>(4);
			ICollection<ObjectId> tips = new HashSet<ObjectId>();
			int ptr = 0;
			while (ptr < buf.Length)
			{
				if (buf[ptr] == '#' || buf[ptr] == '\n')
				{
					ptr = RawParseUtils.NextLF(buf, ptr);
					continue;
				}
				if (buf[ptr] == '+')
				{
					tips.AddItem(ObjectId.FromString(buf, ptr + 2));
					ptr = RawParseUtils.NextLF(buf, ptr + 2);
					continue;
				}
				IList<string> names = new AList<string>(4);
				while (ptr < buf.Length && buf[ptr] == 'P')
				{
					int end = RawParseUtils.NextLF(buf, ptr);
					if (buf[end - 1] == '\n')
					{
						end--;
					}
					names.AddItem(RawParseUtils.Decode(buf, ptr + 2, end));
					ptr = RawParseUtils.NextLF(buf, end);
				}
				if (!tips.IsEmpty() && !names.IsEmpty())
				{
					list.AddItem(new LocalCachedPack(this, tips, names));
					tips = new HashSet<ObjectId>();
				}
			}
			list.TrimToSize();
			return new ObjectDirectory.CachedPackList(s, Sharpen.Collections.UnmodifiableList
				(list), buf);
		}
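ScanCachedPacks parses the cached-packs file line by line: '#' and blank lines are skipped, "+ <object id>" lines accumulate tip commits, and a run of "P <pack name>" lines closes the current group into a LocalCachedPack. The snippet below only illustrates the text layout the parser above accepts; the object ids and pack name are invented for illustration.

static class CachedPacksSample
{
    // Hypothetical contents of a cached-packs file that ScanCachedPacks would accept.
    // Each "+ " line names a tip commit covered by the group, each "P " line a pack file.
    public const string Text =
        "# cached packs written by gc\n" +
        "+ 0123456789012345678901234567890123456789\n" +
        "+ 89abcdef0123456789abcdef0123456789abcdef\n" +
        "P pack-0123456789012345678901234567890123456789.pack\n" +
        "\n";
}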
Example No. 27
0
		/// <exception cref="NGit.Errors.TransportException"></exception>
		public virtual void Push(ProgressMonitor monitor, IDictionary<string, RemoteRefUpdate
			> refUpdates)
		{
			MarkStartedOperation();
			packNames = null;
			newRefs = new SortedDictionary<string, Ref>(GetRefsMap());
			packedRefUpdates = new AList<RemoteRefUpdate>(refUpdates.Count);
			// Filter the commands and issue all deletes first. This way we
			// can correctly handle a directory being cleared out and a new
			// ref using the directory name being created.
			//
			IList<RemoteRefUpdate> updates = new AList<RemoteRefUpdate>();
			foreach (RemoteRefUpdate u in refUpdates.Values)
			{
				string n = u.GetRemoteName();
				if (!n.StartsWith("refs/") || !Repository.IsValidRefName(n))
				{
					u.SetStatus(RemoteRefUpdate.Status.REJECTED_OTHER_REASON);
					u.SetMessage(JGitText.Get().funnyRefname);
					continue;
				}
				if (AnyObjectId.Equals(ObjectId.ZeroId, u.GetNewObjectId()))
				{
					DeleteCommand(u);
				}
				else
				{
					updates.AddItem(u);
				}
			}
			// If we have any updates we need to upload the objects first, to
			// prevent creating refs pointing at non-existent data. Then we
			// can update the refs, and the info-refs file for dumb transports.
			//
			if (!updates.IsEmpty())
			{
				Sendpack(updates, monitor);
			}
			foreach (RemoteRefUpdate u_1 in updates)
			{
				UpdateCommand(u_1);
			}
			// Is this a new repository? If so we should create additional
			// metadata files so it is properly initialized during the push.
			//
			if (!updates.IsEmpty() && IsNewRepository())
			{
				CreateNewRepository(updates);
			}
			RefWriter refWriter = new _RefWriter_177(this, newRefs.Values);
			if (!packedRefUpdates.IsEmpty())
			{
				try
				{
					refWriter.WritePackedRefs();
					foreach (RemoteRefUpdate u_2 in packedRefUpdates)
					{
						u_2.SetStatus(RemoteRefUpdate.Status.OK);
					}
				}
				catch (IOException err)
				{
					foreach (RemoteRefUpdate u_2 in packedRefUpdates)
					{
						u_2.SetStatus(RemoteRefUpdate.Status.REJECTED_OTHER_REASON);
						u_2.SetMessage(err.Message);
					}
					throw new TransportException(uri, JGitText.Get().failedUpdatingRefs, err);
				}
			}
			try
			{
				refWriter.WriteInfoRefs();
			}
			catch (IOException err)
			{
				throw new TransportException(uri, JGitText.Get().failedUpdatingRefs, err);
			}
		}
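This Push implementation belongs to the dumb-transport connection layer: deletes are issued first, then objects are sent with Sendpack before the refs and the info/refs file are rewritten. From application code the same work is normally triggered through the porcelain PushCommand, roughly as sketched below; the member names assume NGit follows JGit's API (SetRemote, SetRefSpecs, Call) and are not verified here.

using NGit;
using NGit.Api;
using NGit.Transport;
using Sharpen;

public static class PushExample
{
    // Pushes a single branch to the named remote.
    // Assumes NGit mirrors JGit's PushCommand; treat the method names as assumptions.
    public static void PushBranch(string repoDir, string remote, string branch)
    {
        Git git = Git.Open(new FilePath(repoDir));
        try
        {
            Iterable<PushResult> results = git.Push()
                .SetRemote(remote)
                .SetRefSpecs(new RefSpec("refs/heads/" + branch))
                .Call();
            foreach (PushResult result in results)
            {
                System.Console.WriteLine(result.GetMessages());
            }
        }
        finally
        {
            git.GetRepository().Close();
        }
    }
}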
        /// <exception cref="NGit.Errors.TransportException"></exception>
        public virtual void Push(ProgressMonitor monitor, IDictionary <string, RemoteRefUpdate
                                                                       > refUpdates)
        {
            MarkStartedOperation();
            packNames        = null;
            newRefs          = new SortedDictionary <string, Ref>(GetRefsMap());
            packedRefUpdates = new AList <RemoteRefUpdate>(refUpdates.Count);
            // Filter the commands and issue all deletes first. This way we
            // can correctly handle a directory being cleared out and a new
            // ref using the directory name being created.
            //
            IList <RemoteRefUpdate> updates = new AList <RemoteRefUpdate>();

            foreach (RemoteRefUpdate u in refUpdates.Values)
            {
                string n = u.GetRemoteName();
                if (!n.StartsWith("refs/") || !Repository.IsValidRefName(n))
                {
                    u.SetStatus(RemoteRefUpdate.Status.REJECTED_OTHER_REASON);
                    u.SetMessage(JGitText.Get().funnyRefname);
                    continue;
                }
                if (AnyObjectId.Equals(ObjectId.ZeroId, u.GetNewObjectId()))
                {
                    DeleteCommand(u);
                }
                else
                {
                    updates.AddItem(u);
                }
            }
            // If we have any updates we need to upload the objects first, to
            // prevent creating refs pointing at non-existent data. Then we
            // can update the refs, and the info-refs file for dumb transports.
            //
            if (!updates.IsEmpty())
            {
                Sendpack(updates, monitor);
            }
            foreach (RemoteRefUpdate u_1 in updates)
            {
                UpdateCommand(u_1);
            }
            // Is this a new repository? If so we should create additional
            // metadata files so it is properly initialized during the push.
            //
            if (!updates.IsEmpty() && IsNewRepository())
            {
                CreateNewRepository(updates);
            }
            RefWriter refWriter = new _RefWriter_179(this, newRefs.Values);

            if (!packedRefUpdates.IsEmpty())
            {
                try
                {
                    refWriter.WritePackedRefs();
                    foreach (RemoteRefUpdate u_2 in packedRefUpdates)
                    {
                        u_2.SetStatus(RemoteRefUpdate.Status.OK);
                    }
                }
                catch (IOException err)
                {
                    foreach (RemoteRefUpdate u_2 in packedRefUpdates)
                    {
                        u_2.SetStatus(RemoteRefUpdate.Status.REJECTED_OTHER_REASON);
                        u_2.SetMessage(err.Message);
                    }
                    throw new TransportException(uri, JGitText.Get().failedUpdatingRefs, err);
                }
            }
            try
            {
                refWriter.WriteInfoRefs();
            }
            catch (IOException err)
            {
                throw new TransportException(uri, JGitText.Get().failedUpdatingRefs, err);
            }
        }
Example No. 29
0
        /// <exception cref="Apache.Http.HttpException"></exception>
        /// <exception cref="System.IO.IOException"></exception>
        public virtual void Process(IHttpRequest request, HttpContext context)
        {
            Args.NotNull(request, "HTTP request");
            Args.NotNull(context, "HTTP context");
            string method = request.GetRequestLine().GetMethod();

            if (Sharpen.Runtime.EqualsIgnoreCase(method, "CONNECT"))
            {
                return;
            }
            HttpClientContext clientContext = ((HttpClientContext)HttpClientContext.Adapt(context
                                                                                          ));
            // Obtain cookie store
            CookieStore cookieStore = clientContext.GetCookieStore();

            if (cookieStore == null)
            {
                this.log.Debug("Cookie store not specified in HTTP context");
                return;
            }
            // Obtain the registry of cookie specs
            Lookup <CookieSpecProvider> registry = clientContext.GetCookieSpecRegistry();

            if (registry == null)
            {
                this.log.Debug("CookieSpec registry not specified in HTTP context");
                return;
            }
            // Obtain the target host, possibly virtual (required)
            HttpHost targetHost = clientContext.GetTargetHost();

            if (targetHost == null)
            {
                this.log.Debug("Target host not set in the context");
                return;
            }
            // Obtain the route (required)
            RouteInfo route = clientContext.GetHttpRoute();

            if (route == null)
            {
                this.log.Debug("Connection route not set in the context");
                return;
            }
            RequestConfig config = clientContext.GetRequestConfig();
            string        policy = config.GetCookieSpec();

            if (policy == null)
            {
                policy = CookieSpecs.BestMatch;
            }
            if (this.log.IsDebugEnabled())
            {
                this.log.Debug("CookieSpec selected: " + policy);
            }
            URI requestURI = null;

            if (request is IHttpUriRequest)
            {
                requestURI = ((IHttpUriRequest)request).GetURI();
            }
            else
            {
                try
                {
                    requestURI = new URI(request.GetRequestLine().GetUri());
                }
                catch (URISyntaxException)
                {
                }
            }
            string path = requestURI != null ? requestURI.GetPath() : null;

            string hostName = targetHost.GetHostName();
            int    port     = targetHost.GetPort();

            if (port < 0)
            {
                port = route.GetTargetHost().GetPort();
            }
            CookieOrigin cookieOrigin = new CookieOrigin(hostName, port >= 0 ? port : 0, !TextUtils
                                                         .IsEmpty(path) ? path : "/", route.IsSecure());
            // Get an instance of the selected cookie policy
            CookieSpecProvider provider = registry.Lookup(policy);

            if (provider == null)
            {
                throw new HttpException("Unsupported cookie policy: " + policy);
            }
            CookieSpec cookieSpec = provider.Create(clientContext);
            // Get all cookies available in the HTTP state
            IList <Apache.Http.Cookie.Cookie> cookies = new AList <Apache.Http.Cookie.Cookie>(cookieStore
                                                                                              .GetCookies());
            // Find cookies matching the given origin
            IList <Apache.Http.Cookie.Cookie> matchedCookies = new AList <Apache.Http.Cookie.Cookie
                                                                          >();
            DateTime now = new DateTime();

            foreach (Apache.Http.Cookie.Cookie cookie in cookies)
            {
                if (!cookie.IsExpired(now))
                {
                    if (cookieSpec.Match(cookie, cookieOrigin))
                    {
                        if (this.log.IsDebugEnabled())
                        {
                            this.log.Debug("Cookie " + cookie + " match " + cookieOrigin);
                        }
                        matchedCookies.AddItem(cookie);
                    }
                }
                else
                {
                    if (this.log.IsDebugEnabled())
                    {
                        this.log.Debug("Cookie " + cookie + " expired");
                    }
                }
            }
            // Generate Cookie request headers
            if (!matchedCookies.IsEmpty())
            {
                IList <Header> headers = cookieSpec.FormatCookies(matchedCookies);
                foreach (Header header in headers)
                {
                    request.AddHeader(header);
                }
            }
            int ver = cookieSpec.GetVersion();

            if (ver > 0)
            {
                bool needVersionHeader = false;
                foreach (Apache.Http.Cookie.Cookie cookie_1 in matchedCookies)
                {
                    if (ver != cookie_1.GetVersion() || !(cookie_1 is SetCookie2))
                    {
                        needVersionHeader = true;
                    }
                }
                if (needVersionHeader)
                {
                    Header header = cookieSpec.GetVersionHeader();
                    if (header != null)
                    {
                        // Advertise cookie version support
                        request.AddHeader(header);
                    }
                }
            }
            // Stick the CookieSpec and CookieOrigin instances to the HTTP context
            // so they could be obtained by the response interceptor
            context.SetAttribute(HttpClientContext.CookieSpec, cookieSpec);
            context.SetAttribute(HttpClientContext.CookieOrigin, cookieOrigin);
        }
Example No. 30
0
        /// <summary>
        /// Stash the contents on the working directory and index in separate commits
        /// and reset to the current HEAD commit.
        /// </summary>
        /// <remarks>
        /// Stash the contents on the working directory and index in separate commits
        /// and reset to the current HEAD commit.
        /// </remarks>
        /// <returns>stashed commit or null if no changes to stash</returns>
        /// <exception cref="NGit.Api.Errors.GitAPIException">NGit.Api.Errors.GitAPIException
        ///     </exception>
        public override RevCommit Call()
        {
            CheckCallable();
            Ref          head   = GetHead();
            ObjectReader reader = repo.NewObjectReader();

            try
            {
                RevCommit      headCommit = ParseCommit(reader, head.GetObjectId());
                DirCache       cache      = repo.LockDirCache();
                ObjectInserter inserter   = repo.NewObjectInserter();
                ObjectId       commitId;
                try
                {
                    TreeWalk treeWalk = new TreeWalk(reader);
                    treeWalk.Recursive = true;
                    treeWalk.AddTree(headCommit.Tree);
                    treeWalk.AddTree(new DirCacheIterator(cache));
                    treeWalk.AddTree(new FileTreeIterator(repo));
                    treeWalk.Filter = AndTreeFilter.Create(new SkipWorkTreeFilter(1), new IndexDiffFilter
                                                               (1, 2));
                    // Return null if no local changes to stash
                    if (!treeWalk.Next())
                    {
                        return(null);
                    }
                    MutableObjectId id = new MutableObjectId();
                    IList <DirCacheEditor.PathEdit> wtEdits = new AList <DirCacheEditor.PathEdit>();
                    IList <string> wtDeletes = new AList <string>();
                    do
                    {
                        AbstractTreeIterator headIter  = treeWalk.GetTree <AbstractTreeIterator>(0);
                        DirCacheIterator     indexIter = treeWalk.GetTree <DirCacheIterator>(1);
                        WorkingTreeIterator  wtIter    = treeWalk.GetTree <WorkingTreeIterator>(2);
                        if (headIter != null && indexIter != null && wtIter != null)
                        {
                            if (!indexIter.GetDirCacheEntry().IsMerged())
                            {
                                throw new UnmergedPathsException(new UnmergedPathException(indexIter.GetDirCacheEntry
                                                                                               ()));
                            }
                            if (wtIter.IdEqual(indexIter) || wtIter.IdEqual(headIter))
                            {
                                continue;
                            }
                            treeWalk.GetObjectId(id, 0);
                            DirCacheEntry entry = new DirCacheEntry(treeWalk.RawPath);
                            entry.SetLength(wtIter.GetEntryLength());
                            entry.LastModified = wtIter.GetEntryLastModified();
                            entry.FileMode     = wtIter.EntryFileMode;
                            long        contentLength = wtIter.GetEntryContentLength();
                            InputStream @in           = wtIter.OpenEntryStream();
                            try
                            {
                                entry.SetObjectId(inserter.Insert(Constants.OBJ_BLOB, contentLength, @in));
                            }
                            finally
                            {
                                @in.Close();
                            }
                            wtEdits.AddItem(new _PathEdit_273(entry, entry));
                        }
                        else
                        {
                            if (indexIter == null)
                            {
                                wtDeletes.AddItem(treeWalk.PathString);
                            }
                            else
                            {
                                if (wtIter == null && headIter != null)
                                {
                                    wtDeletes.AddItem(treeWalk.PathString);
                                }
                            }
                        }
                    }while (treeWalk.Next());
                    string branch = Repository.ShortenRefName(head.GetTarget().GetName());
                    // Commit index changes
                    NGit.CommitBuilder builder = CreateBuilder(headCommit);
                    builder.TreeId  = cache.WriteTree(inserter);
                    builder.Message = MessageFormat.Format(indexMessage, branch, headCommit.Abbreviate
                                                               (7).Name, headCommit.GetShortMessage());
                    ObjectId indexCommit = inserter.Insert(builder);
                    // Commit working tree changes
                    if (!wtEdits.IsEmpty() || !wtDeletes.IsEmpty())
                    {
                        DirCacheEditor editor = cache.Editor();
                        foreach (DirCacheEditor.PathEdit edit in wtEdits)
                        {
                            editor.Add(edit);
                        }
                        foreach (string path in wtDeletes)
                        {
                            editor.Add(new DirCacheEditor.DeletePath(path));
                        }
                        editor.Finish();
                    }
                    builder.AddParentId(indexCommit);
                    builder.Message = MessageFormat.Format(workingDirectoryMessage, branch, headCommit
                                                           .Abbreviate(7).Name, headCommit.GetShortMessage());
                    builder.TreeId = cache.WriteTree(inserter);
                    commitId       = inserter.Insert(builder);
                    inserter.Flush();
                    UpdateStashRef(commitId, builder.Author, builder.Message);
                }
                finally
                {
                    inserter.Release();
                    cache.Unlock();
                }
                // Hard reset to HEAD
                new ResetCommand(repo).SetMode(ResetCommand.ResetType.HARD).Call();
                // Return stashed commit
                return(ParseCommit(reader, commitId));
            }
            catch (IOException e)
            {
                throw new JGitInternalException(JGitText.Get().stashFailed, e);
            }
            finally
            {
                reader.Release();
            }
        }
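Call walks HEAD, the index and the working tree in parallel, writes one commit for the index state and one for the working-tree state, updates the stash ref, and finally hard-resets to HEAD. A minimal invocation sketch follows; it assumes the command is exposed through the porcelain API as in JGit (Git.StashCreate()), so those member names are assumptions.

using NGit;
using NGit.Api;
using NGit.Revwalk;
using Sharpen;

public static class StashExample
{
    // Stashes local modifications and prints the id of the stash commit, if any.
    // Assumes NGit exposes JGit's StashCreateCommand as Git.StashCreate().
    public static void StashLocalChanges(string repoDir)
    {
        Git git = Git.Open(new FilePath(repoDir));
        try
        {
            RevCommit stashed = git.StashCreate().Call();
            if (stashed == null)
            {
                System.Console.WriteLine("Nothing to stash.");
            }
            else
            {
                System.Console.WriteLine("Stashed as " + stashed.Name);
            }
        }
        finally
        {
            git.GetRepository().Close();
        }
    }
}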
		/// <summary>Construct the merge commit message.</summary>
		/// <remarks>Construct the merge commit message.</remarks>
		/// <param name="refsToMerge">the refs which will be merged</param>
		/// <param name="target">the branch ref which will be merged into</param>
		/// <returns>merge commit message</returns>
		public virtual string Format(IList<Ref> refsToMerge, Ref target)
		{
			StringBuilder sb = new StringBuilder();
			sb.Append("Merge ");
			IList<string> branches = new AList<string>();
			IList<string> remoteBranches = new AList<string>();
			IList<string> tags = new AList<string>();
			IList<string> commits = new AList<string>();
			IList<string> others = new AList<string>();
			foreach (Ref @ref in refsToMerge)
			{
				if (@ref.GetName().StartsWith(Constants.R_HEADS))
				{
					branches.AddItem("'" + Repository.ShortenRefName(@ref.GetName()) + "'");
				}
				else
				{
					if (@ref.GetName().StartsWith(Constants.R_REMOTES))
					{
						remoteBranches.AddItem("'" + Repository.ShortenRefName(@ref.GetName()) + "'");
					}
					else
					{
						if (@ref.GetName().StartsWith(Constants.R_TAGS))
						{
							tags.AddItem("'" + Repository.ShortenRefName(@ref.GetName()) + "'");
						}
						else
						{
							if (@ref.GetName().Equals(@ref.GetObjectId().GetName()))
							{
								commits.AddItem("'" + @ref.GetName() + "'");
							}
							else
							{
								others.AddItem(@ref.GetName());
							}
						}
					}
				}
			}
			IList<string> listings = new AList<string>();
			if (!branches.IsEmpty())
			{
				listings.AddItem(JoinNames(branches, "branch", "branches"));
			}
			if (!remoteBranches.IsEmpty())
			{
				listings.AddItem(JoinNames(remoteBranches, "remote branch", "remote branches"));
			}
			if (!tags.IsEmpty())
			{
				listings.AddItem(JoinNames(tags, "tag", "tags"));
			}
			if (!commits.IsEmpty())
			{
				listings.AddItem(JoinNames(commits, "commit", "commits"));
			}
			if (!others.IsEmpty())
			{
				listings.AddItem(StringUtils.Join(others, ", ", " and "));
			}
			sb.Append(StringUtils.Join(listings, ", "));
			string targetName = target.GetLeaf().GetName();
			if (!targetName.Equals(Constants.R_HEADS + Constants.MASTER))
			{
				string targetShortName = Repository.ShortenRefName(target.GetName());
				sb.Append(" into " + targetShortName);
			}
			return sb.ToString();
		}
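Format groups the refs to be merged into branches, remote branches, tags, plain commit ids and everything else, then joins them into the familiar "Merge branch ..." message, appending "into <branch>" unless the target is master. A short usage sketch follows; it assumes this method lives on NGit's MergeMessageFormatter in the NGit.Merge namespace (both assumptions here), while Repository.GetRef, AList and AddItem are used exactly as in the surrounding examples.

using System.Collections.Generic;
using NGit;
using NGit.Merge;
using Sharpen;

public static class MergeMessageExample
{
    // Builds the commit message used when merging a topic branch into HEAD's branch.
    // The MergeMessageFormatter class name is assumed from JGit's API.
    public static string BuildMessage(Repository repo, string topicBranch)
    {
        Ref source = repo.GetRef("refs/heads/" + topicBranch);
        Ref target = repo.GetRef(Constants.HEAD);
        IList<Ref> refsToMerge = new AList<Ref>();
        refsToMerge.AddItem(source);
        return new MergeMessageFormatter().Format(refsToMerge, target);
    }
}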
Example No. 32
0
        public virtual void TestUpgradeFromRel2ReservedImage()
        {
            UnpackStorage(Hadoop2ReservedImage, HadoopDfsDirTxt);
            MiniDFSCluster cluster = null;
            // Try it once without setting the upgrade flag to ensure it fails
            Configuration conf = new Configuration();

            try
            {
                cluster = new MiniDFSCluster.Builder(conf).Format(false).StartupOption(HdfsServerConstants.StartupOption
                                                                                       .Upgrade).NumDataNodes(0).Build();
            }
            catch (ArgumentException e)
            {
                GenericTestUtils.AssertExceptionContains("reserved path component in this version"
                                                         , e);
            }
            finally
            {
                if (cluster != null)
                {
                    cluster.Shutdown();
                }
            }
            // Try it again with a custom rename string
            try
            {
                FSImageFormat.SetRenameReservedPairs(".snapshot=.user-snapshot," + ".reserved=.my-reserved"
                                                     );
                cluster = new MiniDFSCluster.Builder(conf).Format(false).StartupOption(HdfsServerConstants.StartupOption
                                                                                       .Upgrade).NumDataNodes(0).Build();
                DistributedFileSystem dfs = cluster.GetFileSystem();
                // Make sure the paths were renamed as expected
                // Also check that paths are present after a restart, checks that the
                // upgraded fsimage has the same state.
                string[] expected = new string[] { "/edits", "/edits/.reserved", "/edits/.user-snapshot"
                                                   , "/edits/.user-snapshot/editsdir", "/edits/.user-snapshot/editsdir/editscontents"
                                                   , "/edits/.user-snapshot/editsdir/editsdir2", "/image", "/image/.reserved", "/image/.user-snapshot"
                                                   , "/image/.user-snapshot/imagedir", "/image/.user-snapshot/imagedir/imagecontents"
                                                   , "/image/.user-snapshot/imagedir/imagedir2", "/.my-reserved", "/.my-reserved/edits-touch"
                                                   , "/.my-reserved/image-touch" };
                for (int i = 0; i < 2; i++)
                {
                    // Restart the second time through this loop
                    if (i == 1)
                    {
                        cluster.FinalizeCluster(conf);
                        cluster.RestartNameNode(true);
                    }
                    AList <Path> toList = new AList <Path>();
                    toList.AddItem(new Path("/"));
                    AList <string> found = new AList <string>();
                    while (!toList.IsEmpty())
                    {
                        Path         p        = toList.Remove(0);
                        FileStatus[] statuses = dfs.ListStatus(p);
                        foreach (FileStatus status in statuses)
                        {
                            string path = status.GetPath().ToUri().GetPath();
                            System.Console.Out.WriteLine("Found path " + path);
                            found.AddItem(path);
                            if (status.IsDirectory())
                            {
                                toList.AddItem(status.GetPath());
                            }
                        }
                    }
                    foreach (string s in expected)
                    {
                        NUnit.Framework.Assert.IsTrue("Did not find expected path " + s, found.Contains(s
                                                                                                        ));
                    }
                    NUnit.Framework.Assert.AreEqual("Found an unexpected path while listing filesystem"
                                                    , found.Count, expected.Length);
                }
            }
            finally
            {
                if (cluster != null)
                {
                    cluster.Shutdown();
                }
            }
        }
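The interesting part of this test, beyond FSImageFormat.SetRenameReservedPairs, is the breadth-first listing loop used to collect every path in the upgraded filesystem. That traversal can be pulled out into a small helper, sketched below using only the calls already shown in the test; the helper itself is illustrative, not part of Hadoop, and the namespaces follow the usual Sharpen conventions (an assumption).

using System.Collections.Generic;
using Org.Apache.Hadoop.FS;   // namespace assumed for this sharpened port
using Sharpen;

public static class FsWalk
{
    // Breadth-first listing of every path under root, mirroring the loop in the test above.
    public static IList<string> ListAllPaths(FileSystem fs, Path root)
    {
        AList<string> found = new AList<string>();
        AList<Path> toList = new AList<Path>();
        toList.AddItem(root);
        while (!toList.IsEmpty())
        {
            Path p = toList.Remove(0);
            foreach (FileStatus status in fs.ListStatus(p))
            {
                found.AddItem(status.GetPath().ToUri().GetPath());
                if (status.IsDirectory())
                {
                    // Queue sub-directories so their children are visited as well.
                    toList.AddItem(status.GetPath());
                }
            }
        }
        return found;
    }
}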
Example No. 33
0
        /// <exception cref="System.IO.IOException"/>
        public virtual FileStatus[] Glob()
        {
            // First we get the scheme and authority of the pattern that was passed
            // in.
            string scheme    = SchemeFromPath(pathPattern);
            string authority = AuthorityFromPath(pathPattern);
            // Next we strip off everything except the pathname itself, and expand all
            // globs.  Expansion is a process which turns "grouping" clauses,
            // expressed as brackets, into separate path patterns.
            string         pathPatternString = pathPattern.ToUri().GetPath();
            IList <string> flattenedPatterns = GlobExpander.Expand(pathPatternString);
            // Now loop over all flattened patterns.  In every case, we'll be trying to
            // match them to entries in the filesystem.
            AList <FileStatus> results = new AList <FileStatus>(flattenedPatterns.Count);
            bool sawWildcard           = false;

            foreach (string flatPattern in flattenedPatterns)
            {
                // Get the absolute path for this flattened pattern.  We couldn't do
                // this prior to flattening because of patterns like {/,a}, where which
                // path you go down influences how the path must be made absolute.
                Path absPattern = FixRelativePart(new Path(flatPattern.IsEmpty() ? Path.CurDir :
                                                           flatPattern));
                // Now we break the flattened, absolute pattern into path components.
                // For example, /a/*/c would be broken into the list [a, *, c]
                IList <string> components = GetPathComponents(absPattern.ToUri().GetPath());
                // Starting out at the root of the filesystem, we try to match
                // filesystem entries against pattern components.
                AList <FileStatus> candidates = new AList <FileStatus>(1);
                // To get the "real" FileStatus of root, we'd have to do an expensive
                // RPC to the NameNode.  So we create a placeholder FileStatus which has
                // the correct path, but defaults for the rest of the information.
                // Later, if it turns out we actually want the FileStatus of root, we'll
                // replace the placeholder with a real FileStatus obtained from the
                // NameNode.
                FileStatus rootPlaceholder;
                if (Path.Windows && !components.IsEmpty() && Path.IsWindowsAbsolutePath(absPattern
                                                                                        .ToUri().GetPath(), true))
                {
                    // On Windows the path could begin with a drive letter, e.g. /E:/foo.
                    // We will skip matching the drive letter and start from listing the
                    // root of the filesystem on that drive.
                    string driveLetter = components.Remove(0);
                    rootPlaceholder = new FileStatus(0, true, 0, 0, 0, new Path(scheme, authority, Path
                                                                                .Separator + driveLetter + Path.Separator));
                }
                else
                {
                    rootPlaceholder = new FileStatus(0, true, 0, 0, 0, new Path(scheme, authority, Path
                                                                                .Separator));
                }
                candidates.AddItem(rootPlaceholder);
                for (int componentIdx = 0; componentIdx < components.Count; componentIdx++)
                {
                    AList <FileStatus> newCandidates = new AList <FileStatus>(candidates.Count);
                    GlobFilter         globFilter    = new GlobFilter(components[componentIdx]);
                    string             component     = UnescapePathComponent(components[componentIdx]);
                    if (globFilter.HasPattern())
                    {
                        sawWildcard = true;
                    }
                    if (candidates.IsEmpty() && sawWildcard)
                    {
                        // Optimization: if there are no more candidates left, stop examining
                        // the path components.  We can only do this if we've already seen
                        // a wildcard component-- otherwise, we still need to visit all path
                        // components in case one of them is a wildcard.
                        break;
                    }
                    if ((componentIdx < components.Count - 1) && (!globFilter.HasPattern()))
                    {
                        // Optimization: if this is not the terminal path component, and we
                        // are not matching against a glob, assume that it exists.  If it
                        // doesn't exist, we'll find out later when resolving a later glob
                        // or the terminal path component.
                        foreach (FileStatus candidate in candidates)
                        {
                            candidate.SetPath(new Path(candidate.GetPath(), component));
                        }
                        continue;
                    }
                    foreach (FileStatus candidate_1 in candidates)
                    {
                        if (globFilter.HasPattern())
                        {
                            FileStatus[] children = ListStatus(candidate_1.GetPath());
                            if (children.Length == 1)
                            {
                                // If we get back only one result, this could be either a listing
                                // of a directory with one entry, or it could reflect the fact
                                // that what we listed resolved to a file.
                                //
                                // Unfortunately, we can't just compare the returned paths to
                                // figure this out.  Consider the case where you have /a/b, where
                                // b is a symlink to "..".  In that case, listing /a/b will give
                                // back "/a/b" again.  If we just went by returned pathname, we'd
                                // incorrectly conclude that /a/b was a file and should not match
                                // /a/*/*.  So we use getFileStatus of the path we just listed to
                                // disambiguate.
                                if (!GetFileStatus(candidate_1.GetPath()).IsDirectory())
                                {
                                    continue;
                                }
                            }
                            foreach (FileStatus child in children)
                            {
                                if (componentIdx < components.Count - 1)
                                {
                                    // Don't try to recurse into non-directories.  See HADOOP-10957.
                                    if (!child.IsDirectory())
                                    {
                                        continue;
                                    }
                                }
                                // Set the child path based on the parent path.
                                child.SetPath(new Path(candidate_1.GetPath(), child.GetPath().GetName()));
                                if (globFilter.Accept(child.GetPath()))
                                {
                                    newCandidates.AddItem(child);
                                }
                            }
                        }
                        else
                        {
                            // When dealing with non-glob components, use getFileStatus
                            // instead of listStatus.  This is an optimization, but it also
                            // is necessary for correctness in HDFS, since there are some
                            // special HDFS directories like .reserved and .snapshot that are
                            // not visible to listStatus, but which do exist.  (See HADOOP-9877)
                            FileStatus childStatus = GetFileStatus(new Path(candidate_1.GetPath(), component)
                                                                   );
                            if (childStatus != null)
                            {
                                newCandidates.AddItem(childStatus);
                            }
                        }
                    }
                    candidates = newCandidates;
                }
                foreach (FileStatus status in candidates)
                {
                    // Use object equality to see if this status is the root placeholder.
                    // See the explanation for rootPlaceholder above for more information.
                    if (status == rootPlaceholder)
                    {
                        status = GetFileStatus(rootPlaceholder.GetPath());
                        if (status == null)
                        {
                            continue;
                        }
                    }
                    // HADOOP-3497 semantics: the user-defined filter is applied at the
                    // end, once the full path is built up.
                    if (filter.Accept(status.GetPath()))
                    {
                        results.AddItem(status);
                    }
                }
            }

            /*
             * When the input pattern "looks" like just a simple filename, and we
             * can't find it, we return null rather than an empty array.
             * This is a special case which the shell relies on.
             *
             * To be more precise: if there were no results, AND there were no
             * groupings (aka brackets), and no wildcards in the input (aka stars),
             * we return null.
             */
            if ((!sawWildcard) && results.IsEmpty() && (flattenedPatterns.Count <= 1))
            {
                return(null);
            }
            return(Collections.ToArray(results, new FileStatus[0]));
        }
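Glob expands bracket groupings, walks the filesystem one path component at a time, and applies the user filter only once the full paths are built; a null return (rather than an empty array) signals that a plain, wildcard-free name simply did not exist. Applications usually reach this logic through FileSystem.GlobStatus, roughly as follows. The sketch assumes the sharpened Hadoop API keeps the usual names (FileSystem.Get, GlobStatus, GetLen) and namespaces; treat those as assumptions.

using Org.Apache.Hadoop.Conf;
using Org.Apache.Hadoop.FS;   // namespaces assumed for this sharpened port

public static class GlobExample
{
    // Lists all part files of a job output using a glob pattern.
    public static void PrintPartFiles(string outputDir)
    {
        FileSystem fs = FileSystem.Get(new Configuration());
        FileStatus[] matches = fs.GlobStatus(new Path(outputDir + "/part-*"));
        if (matches == null || matches.Length == 0)
        {
            // GlobStatus can return null for a plain name that does not exist
            // (the special case above) or an empty array when a wildcard matches nothing.
            System.Console.WriteLine("No output found under " + outputDir);
            return;
        }
        foreach (FileStatus status in matches)
        {
            System.Console.WriteLine(status.GetPath() + " : " + status.GetLen() + " bytes");
        }
    }
}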
Example No. 34
0
        /// <summary>Construct the merge commit message.</summary>
        /// <remarks>Construct the merge commit message.</remarks>
        /// <param name="refsToMerge">the refs which will be merged</param>
        /// <param name="target">the branch ref which will be merged into</param>
        /// <returns>merge commit message</returns>
        public virtual string Format(IList <Ref> refsToMerge, Ref target)
        {
            StringBuilder sb = new StringBuilder();

            sb.Append("Merge ");
            IList <string> branches       = new AList <string>();
            IList <string> remoteBranches = new AList <string>();
            IList <string> tags           = new AList <string>();
            IList <string> commits        = new AList <string>();
            IList <string> others         = new AList <string>();

            foreach (Ref @ref in refsToMerge)
            {
                if (@ref.GetName().StartsWith(Constants.R_HEADS))
                {
                    branches.AddItem("'" + Repository.ShortenRefName(@ref.GetName()) + "'");
                }
                else
                {
                    if (@ref.GetName().StartsWith(Constants.R_REMOTES))
                    {
                        remoteBranches.AddItem("'" + Repository.ShortenRefName(@ref.GetName()) + "'");
                    }
                    else
                    {
                        if (@ref.GetName().StartsWith(Constants.R_TAGS))
                        {
                            tags.AddItem("'" + Repository.ShortenRefName(@ref.GetName()) + "'");
                        }
                        else
                        {
                            if (@ref.GetName().Equals(@ref.GetObjectId().GetName()))
                            {
                                commits.AddItem("'" + @ref.GetName() + "'");
                            }
                            else
                            {
                                others.AddItem(@ref.GetName());
                            }
                        }
                    }
                }
            }
            IList <string> listings = new AList <string>();

            if (!branches.IsEmpty())
            {
                listings.AddItem(JoinNames(branches, "branch", "branches"));
            }
            if (!remoteBranches.IsEmpty())
            {
                listings.AddItem(JoinNames(remoteBranches, "remote-tracking branch", "remote-tracking branches"
                                           ));
            }
            if (!tags.IsEmpty())
            {
                listings.AddItem(JoinNames(tags, "tag", "tags"));
            }
            if (!commits.IsEmpty())
            {
                listings.AddItem(JoinNames(commits, "commit", "commits"));
            }
            if (!others.IsEmpty())
            {
                listings.AddItem(StringUtils.Join(others, ", ", " and "));
            }
            sb.Append(StringUtils.Join(listings, ", "));
            string targetName = target.GetLeaf().GetName();

            if (!targetName.Equals(Constants.R_HEADS + Constants.MASTER))
            {
                string targetShortName = Repository.ShortenRefName(targetName);
                sb.Append(" into " + targetShortName);
            }
            return(sb.ToString());
        }
Example No. 35
0
        /// <exception cref="System.NotSupportedException"></exception>
        /// <exception cref="NGit.Errors.TransportException"></exception>
        private void ExecuteImp(ProgressMonitor monitor, FetchResult result)
        {
            conn = transport.OpenFetch();
            try
            {
                result.SetAdvertisedRefs(transport.GetURI(), conn.GetRefsMap());
                ICollection <Ref> matched = new HashSet <Ref>();
                foreach (RefSpec spec in toFetch)
                {
                    if (spec.GetSource() == null)
                    {
                        throw new TransportException(MessageFormat.Format(JGitText.Get().sourceRefNotSpecifiedForRefspec
                                                                          , spec));
                    }
                    if (spec.IsWildcard())
                    {
                        ExpandWildcard(spec, matched);
                    }
                    else
                    {
                        ExpandSingle(spec, matched);
                    }
                }
                ICollection <Ref> additionalTags = Sharpen.Collections.EmptyList <Ref>();
                TagOpt            tagopt         = transport.GetTagOpt();
                if (tagopt == TagOpt.AUTO_FOLLOW)
                {
                    additionalTags = ExpandAutoFollowTags();
                }
                else
                {
                    if (tagopt == TagOpt.FETCH_TAGS)
                    {
                        ExpandFetchTags();
                    }
                }
                bool includedTags;
                if (!askFor.IsEmpty() && !AskForIsComplete())
                {
                    FetchObjects(monitor);
                    includedTags = conn.DidFetchIncludeTags();
                    // Connection was used for object transfer. If we
                    // do another fetch we must open a new connection.
                    //
                    CloseConnection(result);
                }
                else
                {
                    includedTags = false;
                }
                if (tagopt == TagOpt.AUTO_FOLLOW && !additionalTags.IsEmpty())
                {
                    // There are more tags that we want to follow, but
                    // not all were asked for on the initial request.
                    //
                    Sharpen.Collections.AddAll(have, askFor.Keys);
                    askFor.Clear();
                    foreach (Ref r in additionalTags)
                    {
                        ObjectId id = r.GetPeeledObjectId();
                        if (id == null || transport.local.HasObject(id))
                        {
                            WantTag(r);
                        }
                    }
                    if (!askFor.IsEmpty() && (!includedTags || !AskForIsComplete()))
                    {
                        ReopenConnection();
                        if (!askFor.IsEmpty())
                        {
                            FetchObjects(monitor);
                        }
                    }
                }
            }
            finally
            {
                CloseConnection(result);
            }
            RevWalk walk = new RevWalk(transport.local);

            try
            {
                if (transport.IsRemoveDeletedRefs())
                {
                    DeleteStaleTrackingRefs(result, walk);
                }
                foreach (TrackingRefUpdate u in localUpdates)
                {
                    try
                    {
                        u.Update(walk);
                        result.Add(u);
                    }
                    catch (IOException err)
                    {
                        throw new TransportException(MessageFormat.Format(JGitText.Get().failureUpdatingTrackingRef
                                                                          , u.GetLocalName(), err.Message), err);
                    }
                }
            }
            finally
            {
                walk.Release();
            }
            if (!fetchHeadUpdates.IsEmpty())
            {
                try
                {
                    UpdateFETCH_HEAD(result);
                }
                catch (IOException err)
                {
                    throw new TransportException(MessageFormat.Format(JGitText.Get().failureUpdatingFETCH_HEAD
                                                                      , err.Message), err);
                }
            }
        }
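ExecuteImp expands the configured ref specs, optionally follows annotated tags, fetches the missing objects (reopening the connection when extra tags are wanted afterwards), applies the tracking-ref updates and finally writes FETCH_HEAD. At the porcelain level the equivalent is a FetchCommand, sketched below under the assumption that NGit mirrors JGit's fluent API (SetRemote, SetRefSpecs, Call); those names are assumptions.

using NGit;
using NGit.Api;
using NGit.Transport;
using Sharpen;

public static class FetchExample
{
    // Fetches a remote branch into its remote-tracking ref.
    // Method names assume NGit follows JGit's FetchCommand API.
    public static void FetchBranch(string repoDir, string remote, string branch)
    {
        Git git = Git.Open(new FilePath(repoDir));
        try
        {
            FetchResult result = git.Fetch()
                .SetRemote(remote)
                .SetRefSpecs(new RefSpec("refs/heads/" + branch + ":refs/remotes/" + remote + "/" + branch))
                .Call();
            System.Console.WriteLine(result.GetMessages());
        }
        finally
        {
            git.GetRepository().Close();
        }
    }
}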