I recently had to download large blob files from an Azure storage account to a local folder. Downloading a file from Azure is straightforward, but files larger than about 2 GB kept failing partway through the download. When I ran into this I searched for a workable solution and couldn't find one quickly, so I'm posting the compress and decompress code I used here.
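For context, the download itself can be done with the Azure SDK. The following is only a minimal sketch, assuming the Azure.Storage.Blobs package and hypothetical connection string, container, and blob names:

using Azure.Storage.Blobs;

// Hypothetical names, for illustration only.
string connectionString = "<your-storage-connection-string>";
string containerName = "backups";
string blobName = "report.doc.gz";
string localPath = @"C:\Downloads\report.doc.gz";

// Download the (already compressed) blob to a local file.
var blobClient = new BlobClient(connectionString, containerName, blobName);
blobClient.DownloadTo(localPath);

The helpers below handle the compression and decompression of files on disk.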
using System;
using System.IO;
using System.IO.Compression;
Compress files:
public static void Compress(FileInfo fi)
{
    // Get the stream of the source file.
    using (FileStream inFile = fi.OpenRead())
    {
        // Skip hidden files and files that are already compressed.
        if ((File.GetAttributes(fi.FullName) & FileAttributes.Hidden)
            != FileAttributes.Hidden
            && fi.Extension != ".gz")
        {
            // Create the compressed file.
            using (FileStream outFile = File.Create(fi.FullName + ".gz"))
            {
                using (GZipStream compress =
                    new GZipStream(outFile, CompressionMode.Compress))
                {
                    // Copy the source file into the compression stream.
                    inFile.CopyTo(compress);
                }
            }

            // Report sizes only after the streams have been flushed and closed,
            // so the compressed length is accurate.
            FileInfo gzFile = new FileInfo(fi.FullName + ".gz");
            Console.WriteLine("Compressed {0} from {1} to {2} bytes.",
                fi.Name, fi.Length, gzFile.Length);
        }
    }
}
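A quick way to exercise Compress, assuming a hypothetical local folder of files to process:

// Hypothetical folder path, for illustration only.
DirectoryInfo dir = new DirectoryInfo(@"C:\Downloads");

// Compress every file in the folder (hidden and .gz files are skipped).
foreach (FileInfo file in dir.GetFiles())
{
    Compress(file);
}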
Decompress a compressed file:
public static void Decompress(FileInfo fi)
{
    // Get the stream of the source file.
    using (FileStream inFile = fi.OpenRead())
    {
        // Get the original file name by trimming the ".gz" extension,
        // for example "report.doc" from "report.doc.gz".
        string origName = fi.FullName.Remove(fi.FullName.Length - fi.Extension.Length);

        // Create the decompressed file.
        using (FileStream outFile = File.Create(origName))
        {
            using (GZipStream decompress =
                new GZipStream(inFile, CompressionMode.Decompress))
            {
                // Copy the decompression stream into the output file.
                decompress.CopyTo(outFile);
                Console.WriteLine("Decompressed: {0}", fi.Name);
            }
        }
    }
}
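And the matching call for Decompress, again assuming a hypothetical folder of downloaded .gz files:

// Hypothetical folder path, for illustration only.
DirectoryInfo dir = new DirectoryInfo(@"C:\Downloads");

// Decompress every .gz file back to its original name.
foreach (FileInfo file in dir.GetFiles("*.gz"))
{
    Decompress(file);
}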