cgpt: Rework number of entries calculation

As Daniel pointed out in CL:234996, half_size_sectors should have been
checked in both places.

This CL reworks that part of the code so that it reads more easily.

BUG=none
BRANCH=none
TEST=unittest

Change-Id: I8faea3b094c375e4fd1a604a8fe759af88943fdf
Reviewed-on: https://chromium-review.googlesource.com/235792
Reviewed-by: Daniel Ehrenberg <dehrenberg@chromium.org>
Tested-by: Nam Nguyen <namnguyen@chromium.org>
Commit-Queue: Nam Nguyen <namnguyen@chromium.org>
Author:    Nam T. Nguyen
Date:      2014-12-15 11:03:36 -08:00
Committer: chrome-internal-fetch
Parent:    3f4d8d05ba
Commit:    d53b6684a2

2 changed files with 27 additions and 11 deletions
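
(For scale, assuming 512-byte sectors, 128-byte entries, and the 16-entry
minimum used in the unit test below: each half of the external GPT region must
have room for a PMBR sector, a header sector, and the entry table, even though
the secondary copy carries no PMBR, so the smallest acceptable region is
(512 + 512 + 16 * 128) * 2 = 6144 bytes, and the 5632-byte image in the test
is rejected.)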


@@ -51,19 +51,29 @@ static int GptCreate(struct drive *drive, CgptCreateParams *params) {
   h->number_of_entries = TOTAL_ENTRIES_SIZE / h->size_of_entry;
   if (drive->gpt.flags & GPT_FLAG_EXTERNAL) {
     // We might have smaller space for the GPT table. Scale accordingly.
-    size_t half_size_sectors = drive->gpt.gpt_drive_sectors / 2;
-    if (half_size_sectors < GPT_HEADER_SECTORS) {
-      Error("Not enough space for a GPT header.\n");
+    //
+    // +------+------------+---------------+-----+--------------+-----------+
+    // | PMBR | Prim. Head | Prim. Entries | ... | Sec. Entries | Sec. Head |
+    // +------+------------+---------------+-----+--------------+-----------+
+    //
+    // Half the size of gpt_drive_sectors must be big enough to hold PMBR +
+    // GPT Header + Entries Table, though the secondary structures do not
+    // contain PMBR.
+    size_t required_headers_size =
+        (GPT_PMBR_SECTORS + GPT_HEADER_SECTORS) * drive->gpt.sector_bytes;
+    size_t min_entries_size = MIN_NUMBER_OF_ENTRIES * h->size_of_entry;
+    size_t required_min_size = required_headers_size + min_entries_size;
+    size_t half_size =
+        (drive->gpt.gpt_drive_sectors / 2) * drive->gpt.sector_bytes;
+    if (half_size < required_min_size) {
+      Error("Not enough space to store GPT structures. Required %d bytes.\n",
+            required_min_size * 2);
       return -1;
     }
-    half_size_sectors -= (GPT_HEADER_SECTORS + GPT_PMBR_SECTORS);
-    size_t half_size = half_size_sectors * drive->gpt.sector_bytes;
-    if (half_size < (MIN_NUMBER_OF_ENTRIES * h->size_of_entry)) {
-      Error("Not enough space for minimum number of entries.\n");
-      return -1;
-    }
-    if (128 > (half_size / h->size_of_entry)) {
-      h->number_of_entries = half_size / h->size_of_entry;
+    size_t max_entries =
+        (half_size - required_headers_size) / h->size_of_entry;
+    if (h->number_of_entries > max_entries) {
+      h->number_of_entries = max_entries;
     }
   }
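
As a sanity check on the new math, here is a small standalone C sketch. It is
an illustration only, not vboot_reference code; the sector size, entry size,
minimum entry count, and 6144-byte region are assumptions taken from the unit
test below. It mirrors the reworked calculation: verify that half of the
region can hold the PMBR, header, and minimum entry table, then clamp the
entry count to whatever fits.

/* Illustrative sketch only -- mirrors the reworked cgpt calculation with
 * assumed values (512-byte sectors, 128-byte entries, 16-entry minimum,
 * 6144-byte region); it is not the vboot_reference code itself. */
#include <stdio.h>
#include <stddef.h>

#define SECTOR_BYTES    512   /* assumed sector size */
#define PMBR_SECTORS    1     /* protective MBR */
#define HEADER_SECTORS  1     /* GPT header */
#define ENTRY_BYTES     128   /* one partition entry */
#define MIN_ENTRIES     16    /* assumed minimum number of entries */

int main(void) {
  size_t region_bytes = 6144;           /* assumed external GPT region size */
  size_t half_size = region_bytes / 2;  /* primary or secondary half */
  size_t headers = (PMBR_SECTORS + HEADER_SECTORS) * SECTOR_BYTES;
  size_t required_min = headers + MIN_ENTRIES * ENTRY_BYTES;

  if (half_size < required_min) {
    fprintf(stderr, "Not enough space: need %zu bytes total.\n",
            required_min * 2);
    return 1;
  }
  /* Clamp the entry count to what fits after the headers. */
  size_t max_entries = (half_size - headers) / ENTRY_BYTES;
  printf("Each half can hold up to %zu entries.\n", max_entries);
  return 0;
}

With these numbers the sketch reports room for exactly 16 entries per half,
matching the "just right" 6144-byte case exercised in the test below.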


@@ -291,6 +291,12 @@ $CGPT find $MTD -t kernel ${DEV} >/dev/null
 # Enable write access again to test boundary in off device storage
 chmod 600 ${DEV}
+# GPT too small
+dd if=/dev/zero of=${DEV} bs=5632 count=1
+assert_fail $CGPT create -D 1024 ${DEV}
+# GPT is just right for 16 entries (512 + 512 + 16 * 128) * 2 = 6144
+dd if=/dev/zero of=${DEV} bs=6144 count=1
+$CGPT create -D 1024 ${DEV}
 # Create a small 8K file to simulate Flash NOR section
 dd if=/dev/zero of=${DEV} bs=8K count=1
 # Drive size is not multiple of 512