Lines Matching full:sd

82 + sd->sd_wu_size = sizeof(struct sr_crypto_wu);
83 sd->sd_type = SR_MD_CRYPTO;
84 strlcpy(sd->sd_name, "CRYPTO", sizeof(sd->sd_name));
85 sd->sd_capabilities = SR_CAP_SYSTEM_DISK | SR_CAP_AUTO_ASSEMBLE;
87 sr_error(sd->sd_sc, "%s requires exactly one chunk",
88 sd->sd_name);
94 + sr_error(sd->sd_sc, "%s exceeds maximum size (%lli > %llu)",
95 + sd->sd_name, coerced_size, SR_CRYPTO_MAXSIZE);
106 - bcopy(data, sd->mds.mdd_crypto.scr_maskkey,
107 + memcpy(sd->mds.mdd_crypto.scr_maskkey, data,
108 sizeof(sd->mds.mdd_crypto.scr_maskkey));
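
The bcopy() to memcpy() conversions here and further down swap the first two arguments: bcopy(src, dst, len) becomes memcpy(dst, src, len). A minimal stand-alone illustration with made-up buffers; memcpy(), unlike bcopy(), does not promise to handle overlapping regions, which these metadata and key copies do not need:

	char src[32], dst[32];

	bcopy(src, dst, sizeof(dst));	/* old: source argument first */
	memcpy(dst, src, sizeof(dst));	/* new: destination argument first, same effect */
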
119 struct sr_discipline *sd = wu->swu_dis;
129 DEVNAME(sd->sd_sc), wu, encrypt);
131 - mtx_enter(&sd->mds.mdd_crypto.scr_mutex);
132 - if ((crwu = TAILQ_FIRST(&sd->mds.mdd_crypto.scr_wus)) != NULL)
133 - TAILQ_REMOVE(&sd->mds.mdd_crypto.scr_wus, crwu, cr_link);
134 - mtx_leave(&sd->mds.mdd_crypto.scr_mutex);
187 crwu->cr_crp->crp_sid = sd->mds.mdd_crypto.scr_sid[keyndx];
206 - switch (sd->mds.mdd_crypto.scr_meta->scm_alg) {
216 + crd->crd_alg = sd->mds.mdd_crypto.scr_alg;
217 + crd->crd_klen = sd->mds.mdd_crypto.scr_klen;
218 crd->crd_key = sd->mds.mdd_crypto.scr_key[0];
239 - struct sr_discipline *sd = wu->swu_dis;
247 - mtx_enter(&sd->mds.mdd_crypto.scr_mutex);
248 - TAILQ_INSERT_TAIL(&sd->mds.mdd_crypto.scr_wus, crwu, cr_link);
249 - mtx_leave(&sd->mds.mdd_crypto.scr_mutex);
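
The dropped scr_mutex/scr_wus freelist handling (lines 131-134 and 247-249), together with the new sd_wu_size assignment at line 82, suggests the crypto work unit is now carried inside the generic work-unit allocation: the discipline reports its real work-unit size and later recovers its private structure with a cast, on the usual assumption that struct sr_workunit is the first member. A rough sketch of that pattern; every field other than those visible in the matched lines is an assumption:

	struct sr_crypto_wu {
		struct sr_workunit	 cr_wu;		/* must come first for the cast below */
		struct cryptop		*cr_crp;	/* per-I/O crypto request (seen above) */
		/* ... further per-I/O crypto state ... */
	};

	/* At discipline init time: generic code now allocates the larger structure. */
	sd->sd_wu_size = sizeof(struct sr_crypto_wu);

	/* In the I/O path: no private freelist, just a cast of the generic work unit. */
	struct sr_crypto_wu *crwu = (struct sr_crypto_wu *)wu;
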
254 if (sizeof(sd->mds.mdd_crypto.scr_meta->scm_kdfhint) <
258 - sd->mds.mdd_crypto.scr_meta->scm_kdfhint,
260 + memcpy(sd->mds.mdd_crypto.scr_meta->scm_kdfhint,
266 if (sizeof(sd->mds.mdd_crypto.scr_maskkey) <
269 - bcopy(&kdfinfo->maskkey, sd->mds.mdd_crypto.scr_maskkey,
270 + memcpy(sd->mds.mdd_crypto.scr_maskkey, &kdfinfo->maskkey,
302 sr_error(sd->sd_sc, "incorrect key or passphrase");
310 + sizeof(sd->mds.mdd_crypto.scr_meta->scm_kdfhint))
312 + explicit_bzero(sd->mds.mdd_crypto.scr_meta->scm_kdfhint,
313 + sizeof(sd->mds.mdd_crypto.scr_meta->scm_kdfhint));
314 + memcpy(sd->mds.mdd_crypto.scr_meta->scm_kdfhint,
320 sizeof(sd->mds.mdd_crypto.scr_key), check_digest);
323 - bcopy(check_digest, sd->mds.mdd_crypto.scr_meta->chk_hmac_sha1.sch_mac,
324 + memcpy(sd->mds.mdd_crypto.scr_meta->chk_hmac_sha1.sch_mac, check_digest,
325 sizeof(sd->mds.mdd_crypto.scr_meta->chk_hmac_sha1.sch_mac));
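
When a new KDF hint is stored (lines 310-314), the fixed-size scm_kdfhint field is wiped with explicit_bzero() before the possibly shorter replacement is copied in, so no trailing bytes of the previous hint survive on disk. The same zero-then-copy idiom in isolation, with hypothetical buffers:

	uint8_t field[128];			/* fixed-size on-disk field (hypothetical) */
	uint8_t newval[64];			/* shorter replacement value (hypothetical) */

	explicit_bzero(field, sizeof(field));	/* erase every byte of the old contents */
	memcpy(field, newval, sizeof(newval));	/* the shorter value leaves zeroed padding */
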
373 - bcopy(&sd->sd_meta->ssdi.ssd_uuid, &km->scmi.scm_uuid,
374 + memcpy(&km->scmi.scm_uuid, &sd->sd_meta->ssdi.ssd_uuid,
382 - bcopy(&sd->sd_meta->ssdi.ssd_uuid, &sm->ssdi.ssd_uuid,
383 + memcpy(&sm->ssdi.ssd_uuid, &sd->sd_meta->ssdi.ssd_uuid,
391 - bcopy(sd->mds.mdd_crypto.scr_maskkey, &skm->skm_maskkey,
392 + memcpy(&skm->skm_maskkey, sd->mds.mdd_crypto.scr_maskkey,
456 if (sr_meta_native_read(sd, dev, sm, NULL)) {
473 - sd->mds.mdd_crypto.scr_maskkey,
474 + memcpy(sd->mds.mdd_crypto.scr_maskkey, &skm->skm_maskkey,
475 sizeof(sd->mds.mdd_crypto.scr_maskkey));
479 - sd->mds.mdd_crypto.scr_maskkey,
480 + memcpy(sd->mds.mdd_crypto.scr_maskkey,
482 sizeof(sd->mds.mdd_crypto.scr_maskkey));
512 +sr_crypto_free_sessions(struct sr_discipline *sd)
517 + if (sd->mds.mdd_crypto.scr_sid[i] != (u_int64_t)-1) {
518 + crypto_freesession(sd->mds.mdd_crypto.scr_sid[i]);
519 + sd->mds.mdd_crypto.scr_sid[i] = (u_int64_t)-1;
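
Only the lines containing sd are shown for the new sr_crypto_free_sessions() helper; filling in the obvious loop around them gives roughly the following shape. The SR_CRYPTO_MAXKEYS bound and the loop variable are assumptions taken from context, not from the matched lines:

	void
	sr_crypto_free_sessions(struct sr_discipline *sd)
	{
		u_int			i;

		/* (u_int64_t)-1 marks a slot that has no open session. */
		for (i = 0; i < SR_CRYPTO_MAXKEYS; i++) {
			if (sd->mds.mdd_crypto.scr_sid[i] != (u_int64_t)-1) {
				crypto_freesession(sd->mds.mdd_crypto.scr_sid[i]);
				sd->mds.mdd_crypto.scr_sid[i] = (u_int64_t)-1;
			}
		}
	}
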
525 sr_crypto_alloc_resources(struct sr_discipline *sd)
534 DEVNAME(sd->sd_sc));
536 + sd->mds.mdd_crypto.scr_alg = CRYPTO_AES_XTS;
537 + switch (sd->mds.mdd_crypto.scr_meta->scm_alg) {
539 + sd->mds.mdd_crypto.scr_klen = 256;
542 + sd->mds.mdd_crypto.scr_klen = 512;
545 + sr_error(sd->sd_sc, "unknown crypto algorithm");
550 sd->mds.mdd_crypto.scr_sid[i] = (u_int64_t)-1;
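
Lines 536-545 cache the algorithm and key length on the discipline (scr_alg, scr_klen) once, so the per-request code at lines 216-217 and 601-602 no longer has to switch on the on-disk scm_alg value. A sketch of that mapping; the case labels and the EINVAL return are assumptions, while the 256/512 key lengths come straight from the matched lines (AES-XTS uses two keys, so the 128- and 256-bit variants take 256 and 512 bits of key material respectively):

	sd->mds.mdd_crypto.scr_alg = CRYPTO_AES_XTS;
	switch (sd->mds.mdd_crypto.scr_meta->scm_alg) {
	case SR_CRYPTOA_AES_XTS_128:			/* assumed label */
		sd->mds.mdd_crypto.scr_klen = 256;	/* two 128-bit XTS keys */
		break;
	case SR_CRYPTOA_AES_XTS_256:			/* assumed label */
		sd->mds.mdd_crypto.scr_klen = 512;	/* two 256-bit XTS keys */
		break;
	default:
		sr_error(sd->sd_sc, "unknown crypto algorithm");
		return (EINVAL);			/* assumed error path */
	}
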
564 - mtx_init(&sd->mds.mdd_crypto.scr_mutex, IPL_BIO);
565 - TAILQ_INIT(&sd->mds.mdd_crypto.scr_wus);
566 - for (i = 0; i < sd->sd_max_wu; i++) {
572 - mtx_enter(&sd->mds.mdd_crypto.scr_mutex);
573 - TAILQ_INSERT_TAIL(&sd->mds.mdd_crypto.scr_wus, crwu, cr_link);
574 - mtx_leave(&sd->mds.mdd_crypto.scr_mutex);
576 + TAILQ_FOREACH(wu, &sd->sd_wu, swu_next) {
590 - switch (sd->mds.mdd_crypto.scr_meta->scm_alg) {
601 + cri.cri_alg = sd->mds.mdd_crypto.scr_alg;
602 + cri.cri_klen = sd->mds.mdd_crypto.scr_klen;
605 - num_keys = sd->sd_meta->ssdi.ssd_size >> SR_CRYPTO_KEY_BLKSHIFT;
608 + num_keys = ((sd->sd_meta->ssdi.ssd_size - 1) >>
614 cri.cri_key = sd->mds.mdd_crypto.scr_key[i];
615 if (crypto_newsession(&sd->mds.mdd_crypto.scr_sid[i],
618 - sd->mds.mdd_crypto.scr_sid[i] != (u_int64_t)-1;
621 - sd->mds.mdd_crypto.scr_sid[i]);
622 - sd->mds.mdd_crypto.scr_sid[i] = (u_int64_t)-1;
624 + sr_crypto_free_sessions(sd);
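
The num_keys change at lines 605/608 replaces a plain shift (a floor division by the per-key region size) with what is presumably ((ssd_size - 1) >> SR_CRYPTO_KEY_BLKSHIFT) + 1, a ceiling division; the continuation line is not among the matches, so the exact tail is an assumption. Rounding up matters because a volume whose size is not an exact multiple of the per-key region still needs a key for its final, partial region. A small illustration with a hypothetical shift of 4 (16 blocks per key):

	#define KEY_BLKSHIFT	4		/* hypothetical: 16 blocks per key */

	u_int64_t size = 17;			/* one block past a full key region */
	u_int64_t floor_keys = size >> KEY_BLKSHIFT;			/* 1 */
	u_int64_t ceil_keys  = ((size - 1) >> KEY_BLKSHIFT) + 1;	/* 2: counts the region holding block 16 */
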
630 sr_crypto_free_resources(struct sr_discipline *sd)
637 DEVNAME(sd->sd_sc));
639 if (sd->mds.mdd_crypto.key_disk != NULL) {
640 - explicit_bzero(sd->mds.mdd_crypto.key_disk, sizeof
641 - sd->mds.mdd_crypto.key_disk);
642 - free(sd->mds.mdd_crypto.key_disk, M_DEVBUF);
643 + explicit_bzero(sd->mds.mdd_crypto.key_disk,
644 + sizeof(*sd->mds.mdd_crypto.key_disk));
645 + free(sd->mds.mdd_crypto.key_disk, M_DEVBUF,
646 + sizeof(*sd->mds.mdd_crypto.key_disk));
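
The key_disk cleanup at lines 640-646 fixes a sizeof mistake: the removed code passed sizeof sd->mds.mdd_crypto.key_disk, which is the size of the pointer, so explicit_bzero() only cleared the first few bytes of the pointed-to structure; the replacement dereferences the pointer and also hands the same object size to the kernel's sized free(), as the diff shows. The pointer-versus-object distinction in isolation:

	struct thing { char buf[256]; } t;	/* stand-in for the pointed-to structure */
	struct thing *p = &t;

	explicit_bzero(p, sizeof(p));		/* wrong: sizeof(p) is the pointer size (e.g. 8) */
	explicit_bzero(p, sizeof(*p));		/* right: clears all 256 bytes of the object */
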
649 sr_hotplug_unregister(sd, sr_crypto_hotplug);
651 - for (i = 0; sd->mds.mdd_crypto.scr_sid[i] != (u_int64_t)-1; i++) {
652 - crypto_freesession(sd->mds.mdd_crypto.scr_sid[i]);
653 - sd->mds.mdd_crypto.scr_sid[i] = (u_int64_t)-1;
655 + sr_crypto_free_sessions(sd);
657 - mtx_enter(&sd->mds.mdd_crypto.scr_mutex);
658 - while ((crwu = TAILQ_FIRST(&sd->mds.mdd_crypto.scr_wus)) != NULL) {
659 - TAILQ_REMOVE(&sd->mds.mdd_crypto.scr_wus, crwu, cr_link);
662 + TAILQ_FOREACH(wu, &sd->sd_wu, swu_next) {
674 - mtx_leave(&sd->mds.mdd_crypto.scr_mutex);
676 sr_wu_free(sd);
677 sr_ccb_free(sd);
745 struct sr_discipline *sd = wu->swu_dis;
757 - blk += sd->sd_meta->ssd_data_offset;
759 - ccb = sr_ccb_rw(sd, 0, blk, xs->datalen, xs->data, xs->flags, 0);
760 + ccb = sr_ccb_rw(sd, 0, blkno, xs->datalen, xs->data, xs->flags, 0);
822 - struct sr_discipline *sd = wu->swu_dis;
826 - struct sr_softc *sc = sd->sd_sc;
835 - panic("%s: sr_crypto_finish_io", DEVNAME(sd->sd_sc));
842 - sr_scsi_done(sd, xs);