<?xml version="1.0" encoding="UTF-8"?>
<rss xmlns:content="http://purl.org/rss/1.0/modules/content/" xmlns:dc="http://purl.org/dc/elements/1.1/" xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#" xmlns:taxo="http://purl.org/rss/1.0/modules/taxonomy/" version="2.0">
  <channel>
    <title>topic HF20 de-dupe statistics in Array Performance and Data Protection</title>
    <link>https://community.hpe.com/t5/array-performance-and-data/hf20-de-dupe-statistics/m-p/7034291#M1201</link>
    <description>&lt;P&gt;Hi,&lt;/P&gt;&lt;P&gt;Why is it not possible to see different volumes' dedupe statistics?&lt;/P&gt;&lt;P&gt;Regards&lt;BR /&gt;Robert&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;</description>
    <pubDate>Mon, 11 Feb 2019 13:19:13 GMT</pubDate>
    <dc:creator>RVil</dc:creator>
    <dc:date>2019-02-11T13:19:13Z</dc:date>
    <item>
      <title>HF20 de-dupe statistics</title>
      <link>https://community.hpe.com/t5/array-performance-and-data/hf20-de-dupe-statistics/m-p/7034291#M1201</link>
      <description>&lt;P&gt;Hi,&lt;/P&gt;&lt;P&gt;Why is it not possible to see different volumes' dedupe statistics?&lt;/P&gt;&lt;P&gt;Regards&lt;BR /&gt;Robert&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;</description>
      <pubDate>Mon, 11 Feb 2019 13:19:13 GMT</pubDate>
      <guid>https://community.hpe.com/t5/array-performance-and-data/hf20-de-dupe-statistics/m-p/7034291#M1201</guid>
      <dc:creator>RVil</dc:creator>
      <dc:date>2019-02-11T13:19:13Z</dc:date>
    </item>
    <item>
      <title>Re: HF20 de-dupe statistics</title>
      <link>https://community.hpe.com/t5/array-performance-and-data/hf20-de-dupe-statistics/m-p/7034301#M1202</link>
      <description>&lt;P&gt;Hello!&lt;/P&gt;&lt;P&gt;Deduplication is performed at the Application Category level, and thus the dedupe results cannot be reported on a per-volume basis as there could be many blocks shared across many volumes that are part of the same Application Category.&lt;/P&gt;&lt;P&gt;If you head to Monitor-&amp;gt;Capacity, you can see the dedupe/compression savings on the App Category level there. Alternatively, the overall Pool Capacity figures also include the global dedupe/compression savings.&lt;/P&gt;</description>
      <pubDate>Mon, 11 Feb 2019 14:31:16 GMT</pubDate>
      <guid>https://community.hpe.com/t5/array-performance-and-data/hf20-de-dupe-statistics/m-p/7034301#M1202</guid>
      <dc:creator>Nick_Dyer</dc:creator>
      <dc:date>2019-02-11T14:31:16Z</dc:date>
    </item>
    <item>
      <title>Re: HF20 de-dupe statistics</title>
      <link>https://community.hpe.com/t5/array-performance-and-data/hf20-de-dupe-statistics/m-p/7034393#M1205</link>
      <description>&lt;P&gt;And what exactly does "Application Category level" mean in this case?&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;</description>
      <pubDate>Tue, 12 Feb 2019 08:00:41 GMT</pubDate>
      <guid>https://community.hpe.com/t5/array-performance-and-data/hf20-de-dupe-statistics/m-p/7034393#M1205</guid>
      <dc:creator>RVil</dc:creator>
      <dc:date>2019-02-12T08:00:41Z</dc:date>
    </item>
    <item>
      <title>Re: HF20 de-dupe statistics</title>
      <link>https://community.hpe.com/t5/array-performance-and-data/hf20-de-dupe-statistics/m-p/7034457#M1206</link>
      <description>&lt;P&gt;Good question. There's an in-depth view of deduplication on Nimble available here:&amp;nbsp;&lt;A href="https://community.hpe.com/t5/HPE-Storage-Tech-Insiders/Dedupe-An-Engineer-Looks-Under-the-Hood/ba-p/6986460" target="_blank"&gt;https://community.hpe.com/t5/HPE-Storage-Tech-Insiders/Dedupe-An-Engineer-Looks-Under-the-Hood/ba-p/6986460&lt;/A&gt;&lt;/P&gt;</description>
      <pubDate>Tue, 12 Feb 2019 16:27:28 GMT</pubDate>
      <guid>https://community.hpe.com/t5/array-performance-and-data/hf20-de-dupe-statistics/m-p/7034457#M1206</guid>
      <dc:creator>Nick_Dyer</dc:creator>
      <dc:date>2019-02-12T16:27:28Z</dc:date>
    </item>
    <item>
      <title>Re: HF20 de-dupe statistics</title>
      <link>https://community.hpe.com/t5/array-performance-and-data/hf20-de-dupe-statistics/m-p/7034669#M1210</link>
      <description>&lt;P&gt;Then it's just too bad; I have all my volumes set as Virtual Server, as all my volumes are Hyper-V attached drives.&lt;/P&gt;</description>
      <pubDate>Thu, 14 Feb 2019 07:45:36 GMT</pubDate>
      <guid>https://community.hpe.com/t5/array-performance-and-data/hf20-de-dupe-statistics/m-p/7034669#M1210</guid>
      <dc:creator>RVil</dc:creator>
      <dc:date>2019-02-14T07:45:36Z</dc:date>
    </item>
    <item>
      <title>Re: HF20 de-dupe statistics</title>
      <link>https://community.hpe.com/t5/array-performance-and-data/hf20-de-dupe-statistics/m-p/7034724#M1212</link>
      <description>&lt;P&gt;That's ok! If they're CSV attached shared drives, then that's exactly how it should be.&lt;/P&gt;</description>
      <pubDate>Thu, 14 Feb 2019 12:44:39 GMT</pubDate>
      <guid>https://community.hpe.com/t5/array-performance-and-data/hf20-de-dupe-statistics/m-p/7034724#M1212</guid>
      <dc:creator>Nick_Dyer</dc:creator>
      <dc:date>2019-02-14T12:44:39Z</dc:date>
    </item>
    <item>
      <title>Re: HF20 de-dupe statistics</title>
      <link>https://community.hpe.com/t5/array-performance-and-data/hf20-de-dupe-statistics/m-p/7034730#M1213</link>
      <description>&lt;P&gt;I know. I meant, the application statistics are not worth much, as everything is reported as the same application :)&lt;/P&gt;</description>
      <pubDate>Thu, 14 Feb 2019 13:59:30 GMT</pubDate>
      <guid>https://community.hpe.com/t5/array-performance-and-data/hf20-de-dupe-statistics/m-p/7034730#M1213</guid>
      <dc:creator>RVil</dc:creator>
      <dc:date>2019-02-14T13:59:30Z</dc:date>
    </item>
  </channel>
</rss>

