Using the AzureBlob File Copy task in VSTS, I have deployed client libraries to Azure blob storage, from which an SPA client consumes the libraries and style sheets.

AzureBlob File Copy does not set the content headers of the files, so the client cannot consume the content correctly.

Is there another task that solves this, or what would be the scope of creating a custom task that uploads the files and sets the content types correctly? E.g. .js as application/javascript and .css as text/css.

Answers

I ended up creating the task myself in C#, using existing code.

using System; 
using System.Collections.Generic; 
using System.ComponentModel; 
using System.ComponentModel.DataAnnotations; 
using System.Diagnostics; 
using System.IO; 
using System.Linq; 
using System.Net; 
using System.Net.Http; 
using System.Reflection; 
using System.Threading; 
using System.Threading.Tasks; 
using System.Threading.Tasks.Dataflow; 
using CommandLine; 
using Microsoft.WindowsAzure.Storage; 
using Microsoft.WindowsAzure.Storage.Auth; 
using Microsoft.WindowsAzure.Storage.Blob; 
using Newtonsoft.Json.Linq; 
using SInnovations.VSTeamServices.TaskBuilder.Attributes; 
using SInnovations.VSTeamServices.TaskBuilder.AzureResourceManager.ResourceTypes; 
using SInnovations.VSTeamServices.TaskBuilder.ConsoleUtils; 
using SInnovations.VSTeamServices.TaskBuilder.ResourceTypes; 
using SInnovations.VSTeamServices.TaskBuilder.Tasks; 

namespace AzureBlobFileCopy 
{ 

    public class ConnectedServiceRelation : PropertyRelation<ProgramOptions, ServiceEndpoint> 
    { 
     public ConnectedServiceRelation() 
      : base(k => k.ConnectedServiceName) 
     { 

     } 
    } 

    [ResourceType(TaskInputType = "pickList")] 
    public class ARMListKey : IConsoleReader<ProgramOptions>, IConsoleExecutor<ProgramOptions> 
    { 
     public void OnConsoleParsing(Parser parser, string[] args, ProgramOptions options, PropertyInfo info) 
     { 
      Id = args[Array.IndexOf(args, "--storage") + 1]; 
     } 

      // Call the ARM listKeys endpoint for the selected storage account and build a CloudStorageAccount from the first key
      public void Execute(ProgramOptions options)
     { 
      var http = options.ConnectedServiceName.GetAuthorizedHttpClient("https://management.azure.com/"); 

      var keys = http.PostAsync($"https://management.azure.com{Id}/listKeys?api-version=2016-01-01", new StringContent(string.Empty)).GetAwaiter().GetResult(); 
      var keysObj = JObject.Parse(keys.Content.ReadAsStringAsync().GetAwaiter().GetResult()); 

      Account = new CloudStorageAccount(new StorageCredentials(Id.Split('/').Last(), keysObj.SelectTokens("$.keys[*].value").First().ToString()), true); 
     } 

     public string Id { get; set; } 

     public CloudStorageAccount Account { get; set; } 

    } 

    [ConnectedServiceRelation(typeof(ConnectedServiceRelation))] 
    [EntryPoint("Uploading to $(storage)")] 
    [Group(DisplayName = "Output", isExpanded = true, Name = "output")] 
    public class ProgramOptions 
    { 

     [Display(ShortName = "source", Name = "Copy Path", Description = "The files that should be copied", ResourceType = typeof(GlobPath))] 
     public GlobPath Source { get; set; } 

     [Required] 
     [Display(Name = "Azure Subscription", ShortName = "ConnectedServiceName", ResourceType = typeof(ServiceEndpoint), Description = "Azure Service Principal to obtain tokens from")] 
     public ServiceEndpoint ConnectedServiceName { get; set; } 

     [Required] 
     [ArmResourceIdPicker("Microsoft.Storage/storageAccounts", "2016-01-01")] 
     [Display(ShortName = "storage", Name = "Storage Account", Description = "The storage account to copy files to", ResourceType = typeof(ARMListKey))] 
     public ARMListKey StorageAccount { get; set; } 


     [Display(Name = "Container Name")] 
     [Option("container", Required = true)] 
     public string ContainerName { get; set; } 

     [Display(Name = "Prefix for uploaded data")] 
     [Option("prefix")] 
     public string Prefix { get; set; } 

     [Display(Name = "Fail if files Exists")] 
     [DefaultValue(true)] 
     [Option("failOnExists")] 
     public bool FailIfFilesExist { get; set; } 

     [Display(Name = "Storage Container Uri", GroupName = "output")] 
     [Option("StorageContainerUri")] 
     public string StorageContainerUri { get; set; } 

     [Display(Name = "Storage Container SAS token", GroupName = "output")] 
     [Option("StorageContainerSASToken")] 
     public string StorageContainerSASToken 
     { 
      get; set; 
     } 


     [Display(Name = "Verbose", Description = "Write out each file thats uploaded")] 
     [Option("Verbose")] 
     public bool Verbose { get; set; } 
    } 
    public class Program 
    { 
     private static readonly CancellationTokenSource cancellationTokenSource = new CancellationTokenSource(); 
     private static readonly ManualResetEvent runCompleteEvent = new ManualResetEvent(false); 

     static void Main(string[] args) 
     { 
#if DEBUG 
    //  args = new[] { "--build" }; 
#endif 
      ServicePointManager.UseNagleAlgorithm = true; 
      ServicePointManager.Expect100Continue = true; 
      ServicePointManager.CheckCertificateRevocationList = true; 
      ServicePointManager.DefaultConnectionLimit = ServicePointManager.DefaultPersistentConnectionLimit * 100; 

      try 
      { 

       RunAsync(ConsoleHelper.ParseAndHandleArguments<ProgramOptions>($"Finding and uploading data", args), 
        cancellationTokenSource.Token).Wait(); 

      } 
      finally 
      { 
       runCompleteEvent.Set(); 
      } 

     } 

      private static async Task RunAsync(ProgramOptions ops, CancellationToken cancellationToken)
     { 

      Console.WriteLine($"Uploading data at {ops.Source} to {ops.StorageAccount.Account.BlobEndpoint} using {ops.Prefix} as prefix in {ops.ContainerName}"); 

      var client = ops.StorageAccount.Account.CreateCloudBlobClient(); 

      var container = client.GetContainerReference(ops.ContainerName); 

      await container.CreateIfNotExistsAsync(); 

      // Optionally fail the task if any file that would be uploaded already exists under the target prefix
      if (ops.FailIfFilesExist)
      { 
       var uploads = ops.Source.MatchedFiles() 
        .Select(file => Path.Combine(ops.Prefix, file.Substring(ops.Source.Root.Length).TrimStart('/', '\\')).Replace("\\", "/")) 
        .ToLookup(k=>k); 

       foreach(var file in container.ListBlobs(ops.Prefix, true).OfType<CloudBlockBlob>().Select(b => b.Name)) 
       { 
        if (uploads.Contains(file)) 
        { 
         Console.WriteLine("##vso[task.logissue type=error] File Exists: " + file); 
         throw new Exception("File exists: " + file); 
        } 
       } 


      } 



      // Upload matched files in parallel (up to 64 at a time); each upload sets the blob's content type before streaming the bytes
      var actionBlock = new TransformBlock<string, Tuple<string, CloudBlockBlob, TimeSpan>>(async (string file) =>
       { 
        var filestopWatch = Stopwatch.StartNew(); 
        using (var fileStream = File.OpenRead(file)) 
        { 
         var blob = container.GetBlockBlobReference(Path.Combine(ops.Prefix,file.Substring(ops.Source.Root.Length).TrimStart('/','\\')).Replace("\\","/")); 
         blob.Properties.ContentType = Constants.GetContentType(file); // set the content header (helper not shown; see the sketch after the listing)

         using (var writeable = await blob.OpenWriteAsync()) 
         { 
          await fileStream.CopyToAsync(writeable); 
         } 
         return new Tuple<string, CloudBlockBlob, TimeSpan>(file, blob, filestopWatch.Elapsed); 
        } 

       }, new ExecutionDataflowBlockOptions { MaxDegreeOfParallelism = 64 }); 

      var i = 0; 
      var completed = new ActionBlock<Tuple<string, CloudBlockBlob, TimeSpan>>((blob) => 
      { 
       if (ops.Verbose) 
       { 
        Console.WriteLine($"Uploaded {blob.Item1} to {blob.Item2.Name} completed in {blob.Item3}"); 
       } 

       Interlocked.Increment(ref i); 
      }); 

      actionBlock.LinkTo(completed, new DataflowLinkOptions { PropagateCompletion = true }); 
      var stopWatch = Stopwatch.StartNew(); 
      foreach (var file in ops.Source.MatchedFiles()) 
      { 

       await actionBlock.SendAsync(file); 
      } 

      actionBlock.Complete(); 

      await completed.Completion; 

      Console.WriteLine($"Uploaded {i} files to {container.Name}{ops.Prefix} in {stopWatch.Elapsed}"); 


      // If output variable names were supplied, expose the container URI and a short-lived SAS token as VSTS variables
      if (!string.IsNullOrEmpty(ops.StorageContainerUri))
      { 
       TaskHelper.SetVariable(ops.StorageContainerUri, container.Uri.ToString()); 

      } 
      if (!string.IsNullOrEmpty(ops.StorageContainerSASToken)) 
      { 
       TaskHelper.SetVariable(ops.StorageContainerSASToken, container.GetSharedAccessSignature(new SharedAccessBlobPolicy 
       { 
        SharedAccessExpiryTime = DateTimeOffset.UtcNow.AddHours(2), 
        Permissions = SharedAccessBlobPermissions.Add | SharedAccessBlobPermissions.Create | SharedAccessBlobPermissions.Delete | SharedAccessBlobPermissions.List | SharedAccessBlobPermissions.Read | SharedAccessBlobPermissions.Write 
       }), 
        true); 
      } 

     } 
    } 
} 
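
The listing references a Constants.GetContentType helper that is not shown. A minimal sketch of what such a helper could look like (an assumption, not the original code: a small extension-to-MIME map with a binary fallback):

using System;
using System.Collections.Generic;
using System.IO;

namespace AzureBlobFileCopy
{
    public static class Constants
    {
        // Map well-known file extensions to MIME types; extend as needed for other assets
        private static readonly Dictionary<string, string> MimeTypes =
            new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase)
            {
                { ".js", "application/javascript" },
                { ".css", "text/css" },
                { ".html", "text/html" },
                { ".json", "application/json" },
                { ".svg", "image/svg+xml" },
            };

        public static string GetContentType(string file)
        {
            string contentType;
            return MimeTypes.TryGetValue(Path.GetExtension(file), out contentType)
                ? contentType
                : "application/octet-stream"; // unknown extensions fall back to a binary type
        }
    }
}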

You can set the headers on the files with PowerShell: copy the files first and then set the headers with an Azure PowerShell task, or upload the files and set the headers directly from PowerShell.
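
If you prefer to stay in C#, the same copy-then-fix approach works with the storage SDK used in the answer above. A minimal sketch (hypothetical names: account, the "assets" container and "site.css" blob are placeholders, and the code is assumed to run inside an async method):

var container = account.CreateCloudBlobClient().GetContainerReference("assets");
var blob = container.GetBlockBlobReference("site.css");
await blob.FetchAttributesAsync();          // load the blob's current properties
blob.Properties.ContentType = "text/css";   // correct the content header
await blob.SetPropertiesAsync();            // persist the change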


AzureBlob File Copy uses AzCopy behind the scenes, so how about adding the /SetContentType argument as the value of the Additional Arguments field? https://docs.microsoft.com/en-us/azure/storage/storage-use-azcopy#specify-the-mime-content-type-of-a-destination-blob
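
For reference, per the linked documentation: passing /SetContentType with no value makes AzCopy infer each blob's MIME type from its file extension, while a value such as /SetContentType:text/css forces that specific type for everything copied (the exact behavior depends on the AzCopy version the task ships with).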


Nice find: I think this will get AzCopy to set the content type based on the file extension, which is the easiest approach. I'll leave my answer up, though, since it shows how easily you can customize things when you need more flexibility than the standard file types.