Adding an extra disk to a single-disk zpool

alienkidmj12:
I originally set this up on a single 4 TB disk, and now I want to add a second disk to create a mirror. The GUI wouldn't entertain me, so I'm trying to do it via the command line. So far I've done the following:

[root@midnight-freenas] ~# gpart create -s gpt /dev/da2
da2 created

[root@midnight-freenas] ~# zpool status
pool: freenas-boot
state: ONLINE
scan: none requested
config:

NAME            STATE     READ WRITE CKSUM
freenas-boot    ONLINE       0     0     0
  da0p2         ONLINE       0     0     0

errors: No known data errors

pool: sivesstorage_pool
state: ONLINE
scan: none requested
config:

NAME                                          STATE     READ WRITE CKSUM
sivesstorage_pool                             ONLINE       0     0     0
  gptid/622c5f80-5be3-11e5-98d6-000c2972bceb  ONLINE       0     0     0

errors: No known data errors
[root@midnight-freenas] ~# zfs list
NAME USED AVAIL REFER MOUNTPOINT
freenas-boot 666M 8.98G 31K none
freenas-boot/ROOT 651M 8.98G 25K none
freenas-boot/ROOT/FreeNAS-9.3-STABLE-201509160044 650M 8.98G 513M /
freenas-boot/ROOT/Initial-Install 1K 8.98G 510M legacy
freenas-boot/ROOT/Pre-FreeNAS-9.3-STABLE-201509022158-846051 1K 8.98G 512M legacy
freenas-boot/ROOT/default 1.25M 8.98G 512M legacy
freenas-boot/grub 13.6M 8.98G 6.79M legacy
sivesstorage_pool 138G 3.38T 96K /mnt/sivesstorage_pool
sivesstorage_pool/.system 72.7M 3.38T 28.6M legacy
sivesstorage_pool/.system/configs-5ece5c906a8f4df886779fae5cade8a5 2.01M 3.38T 2.01M legacy
sivesstorage_pool/.system/cores 11.9M 3.38T 11.9M legacy
sivesstorage_pool/.system/rrd-5ece5c906a8f4df886779fae5cade8a5 96K 3.38T 96K legacy
sivesstorage_pool/.system/samba4 2.86M 3.38T 2.86M legacy
sivesstorage_pool/.system/syslog-5ece5c906a8f4df886779fae5cade8a5 27.3M 3.38T 27.3M legacy
sivesstorage_pool/jails 96K 3.38T 96K /mnt/sivesstorage_pool/jails
sivesstorage_pool/sivesfiles 138G 3.38T 138G /mnt/sivesstorage_pool/sivesfiles
[root@midnight-freenas] ~#


I tried:

[root@midnight-freenas] ~# gpart add -t sivesstorage_pool /dev/da2
gpart: Invalid argument
[root@midnight-freenas] ~# gpart add -i 1 -t sivesstorage_pool /dev/da2
gpart: Invalid argument
[root@midnight-freenas] ~# gpart add -t sivesstorage_pool /dev/da2

It can't be that difficult?
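
Reading gpart(8) again, -t wants a partition type (freebsd-zfs, freebsd-swap, and so on), not the name of the pool, which is why those commands were rejected. Something along these lines should be the right shape for the data partition, assuming da2 should end up laid out like the existing disk:

gpart add -t freebsd-zfs da2

The pool itself then gets turned into a mirror with zpool attach rather than with gpart; gpart only carves up the new disk.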
 

alienkidmj12:
[root@midnight-freenas] ~# gpart list da1
Geom name: da1
modified: false
state: OK
fwheads: 255
fwsectors: 63
last: 7814037134
first: 34
entries: 128
scheme: GPT
Providers:
1. Name: da1p1
Mediasize: 2147483648 (2.0G)
Sectorsize: 512
Stripesize: 0
Stripeoffset: 65536
Mode: r1w1e1
rawuuid: 621bb3f1-5be3-11e5-98d6-000c2972bceb
rawtype: 516e7cb5-6ecf-11d6-8ff8-00022d09712b
label: 1
length: 2147483648
offset: 65536
type: freebsd-swap
index: 1
end: 4194431
start: 128
2. Name: da1p2
Mediasize: 3998639460352 (3.7T)
Sectorsize: 512
Stripesize: 0
Stripeoffset: 2147549184
Mode: r1w1e2
rawuuid: 622c5f80-5be3-11e5-98d6-000c2972bceb
rawtype: 516e7cba-6ecf-11d6-8ff8-00022d09712b
label: 1
length: 3998639460352
offset: 2147549184
type: freebsd-zfs
index: 2
end: 7814037127
start: 4194432
Consumers:
1. Name: da1
Mediasize: 4000787030016 (3.7T)
Sectorsize: 512
Mode: r2w2e5

[root@midnight-freenas] ~# gpart list da2
Geom name: da2
modified: false
state: OK
fwheads: 255
fwsectors: 63
last: 7814037134
first: 34
entries: 128
scheme: GPT
Consumers:
1. Name: da2
Mediasize: 4000787030016 (3.7T)
Sectorsize: 512
Mode: r0w0e0

[root@midnight-freenas] ~#
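
So da1 carries a 2 GB freebsd-swap partition plus a freebsd-zfs partition covering the rest of the disk (the standard FreeNAS layout), while da2 only has an empty GPT so far. If I have the syntax right, the whole layout can be copied across in one go with gpart backup piped into gpart restore, for example:

gpart backup da1 | gpart restore -F da2

-F wipes any existing partitioning on da2 first, and the restored partitions get fresh rawuuids, so a gpart list da2 afterwards is still needed to find the new gptid.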
 

alienkidmj12:
Figured out how to create the partitions, but I can't get the size exact; I ran out of room. Woot.

[root@midnight-freenas] ~# gpart add -b 128 -i 1 -t freebsd-swap -s 2G da2
da2p1 added
[root@midnight-freenas] ~# gpart list da1
Geom name: da1
modified: false
state: OK
fwheads: 255
fwsectors: 63
last: 7814037134
first: 34
entries: 128
scheme: GPT
Providers:
1. Name: da1p1
Mediasize: 2147483648 (2.0G)
Sectorsize: 512
Stripesize: 0
Stripeoffset: 65536
Mode: r1w1e1
rawuuid: 621bb3f1-5be3-11e5-98d6-000c2972bceb
rawtype: 516e7cb5-6ecf-11d6-8ff8-00022d09712b
label: 1
length: 2147483648
offset: 65536
type: freebsd-swap
index: 1
end: 4194431
start: 128
2. Name: da1p2
Mediasize: 3998639460352 (3.7T)
Sectorsize: 512
Stripesize: 0
Stripeoffset: 2147549184
Mode: r1w1e2
rawuuid: 622c5f80-5be3-11e5-98d6-000c2972bceb
rawtype: 516e7cba-6ecf-11d6-8ff8-00022d09712b
label: 1
length: 3998639460352
offset: 2147549184
type: freebsd-zfs
index: 2
end: 7814037127
start: 4194432
Consumers:
1. Name: da1
Mediasize: 4000787030016 (3.7T)
Sectorsize: 512
Mode: r2w2e5

[root@midnight-freenas] ~# gpart list da2
Geom name: da2
modified: false
state: OK
fwheads: 255
fwsectors: 63
last: 7814037134
first: 34
entries: 128
scheme: GPT
Providers:
1. Name: da2p1
Mediasize: 2147483648 (2.0G)
Sectorsize: 512
Stripesize: 0
Stripeoffset: 65536
Mode: r0w0e0
rawuuid: 567d1827-6162-11e5-8385-000c2972bceb
rawtype: 516e7cb5-6ecf-11d6-8ff8-00022d09712b
label: 1
length: 2147483648
offset: 65536
type: freebsd-swap
index: 1
end: 4194431
start: 128
Consumers:
1. Name: da2
Mediasize: 4000787030016 (3.7T)
Sectorsize: 512
Mode: r0w0e0

[root@midnight-freenas] ~# gpart add -i 2 -t freebsd-zfs -s 3998639460352 da2
gpart: autofill: No space left on device
[root@midnight-freenas] ~#
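
The catch here, as far as I can tell, is that gpart's -s takes a sector count unless a unit suffix is given, so -s 3998639460352 asked for roughly 500 times more sectors than the disk actually has, hence the "No space left on device". If an undersized p2 got created while experimenting, gpart delete -i 2 da2 removes it; after that, leaving -s off entirely should hand partition 2 everything after the swap, the same as da1p2:

gpart add -i 2 -t freebsd-zfs da2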
 

alienkidmj12:
All fixed ;)

[root@midnight-freenas] ~# zpool status
pool: freenas-boot
state: ONLINE
scan: none requested
config:

NAME            STATE     READ WRITE CKSUM
freenas-boot    ONLINE       0     0     0
  da0p2         ONLINE       0     0     0

errors: No known data errors

pool: sivesstorage_pool
state: ONLINE
status: One or more devices is currently being resilvered. The pool will
continue to function, possibly in a degraded state.
action: Wait for the resilver to complete.
scan: resilver in progress since Tue Sep 22 21:09:41 2015
1.41G scanned out of 138G at 111M/s, 0h20m to go
1.41G resilvered, 1.02% done
config:

NAME                                              STATE     READ WRITE CKSUM
sivesstorage_pool                                 ONLINE       0     0     0
  mirror-0                                        ONLINE       0     0     0
    gptid/622c5f80-5be3-11e5-98d6-000c2972bceb    ONLINE       0     0     0
    gptid/0d403359-6164-11e5-8385-000c2972bceb    ONLINE       0     0     0  (resilvering)

errors: No known data errors
[root@midnight-freenas] ~#
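
For the record, going from the failed gpart add to the mirror above comes down to creating da2p2 at its natural size and then attaching it by gptid; something along these lines, with the gptids taken from the zpool status output above:

gpart add -i 2 -t freebsd-zfs da2
zpool attach sivesstorage_pool \
    gptid/622c5f80-5be3-11e5-98d6-000c2972bceb \
    gptid/0d403359-6164-11e5-8385-000c2972bceb

zpool attach converts the single-disk vdev into mirror-0 and starts the resilver shown above; once zpool status reports the resilver as completed, the pool is a proper two-way mirror.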
 