次の方法があります
// The photos shown by the browser; starts out empty.
// (Fixed: the original `var photos = [MWPhoto] = [MWPhoto]()` has two `=`
// signs and does not compile — the type annotation needs a colon.)
var photos: [MWPhoto] = [MWPhoto]()
// QUESTION CODE — intentionally broken: `photos.count` is `Int`, but the
// declared return type is `UInt`, so this fails with
// "Int is not convertible to UInt".
func numberOfPhotosInPhotoBrowser(photoBrowser: MWPhotoBrowser!) -> UInt {
return self.photos.count
}
// QUESTION CODE — intentionally broken: array subscripts take `Int`, but
// `index` is `UInt`, so this fails with "UInt is not convertible to Int".
func photoBrowser(photoBrowser: MWPhotoBrowser!, photoAtIndex index: UInt) -> MWPhotoProtocol! {
return self.photos[index]
}
ただし、最初の関数では「Int is not convertible to UInt」というエラーになります(`self.photos.count` は `Int` であるため)。
2番目の関数では「UInt is not convertible to Int」というエラーになります(`self.photos[...]` の添字には `Int` しか渡せないため)。
`UInt` と `Int` を正しく相互に変換するにはどうすればよいですか?
最初の例では、戻り値の型は `UInt` ですが、`count` は `Int` を返すため、`Int` から `UInt` を作って返す必要があります。
基本的に、`UInt` には `Int`、`CGFloat`、`Double`、さらには `String` などの値型を引数として受け取り、新しい値を返すイニシャライザが用意されています。
-
// The data source requires a `UInt`, but `count` is `Int`, so convert
// explicitly before returning.
func numberOfPhotosInPhotoBrowser(photoBrowser: MWPhotoBrowser!) -> UInt {
    let photoCount = self.photos.count
    return UInt(photoCount)
}
2番目の場合、配列の添字は `Int` 値を期待するため、渡された `UInt` から新しい `Int` 値を作成してから添字に渡します。
// Array subscripts take `Int`, so convert the incoming `UInt` index first.
func photoBrowser(photoBrowser: MWPhotoBrowser!, photoAtIndex index: UInt) -> MWPhotoProtocol! {
    let position = Int(index)
    return self.photos[position]
}
// Start with a plain Int.
var someInt: Int = 8
someInt
// UInt(_:) builds an unsigned value from the Int (traps if negative).
var someIntToUInt = UInt(someInt)
someIntToUInt
// Start with a UInt.
var someUInt: UInt = 10
someUInt
// Int(_:) converts back (traps if the value exceeds Int.max).
var someUIntToInt = Int(someUInt)
someUIntToInt
負の値からunsigned intが必要な場合は、UInt(bitPattern :)を使用します
// Reinterpret the two's-complement bits of a negative Int as a UInt
// (no trap, unlike UInt(_:)).
let intVal: Int = -1
let uintVal = UInt(bitPattern: intVal) // all bits set, i.e. UInt.max
これをクラス外のどこかに追加します。
extension UInt {
    /// This value converted to a signed `Int`.
    /// Traps if the value exceeds `Int.max`.
    var toInt: Int {
        return Int(self)
    }
}
それから電話してください:
self.photos[index.toInt]
Swiftの分かりにくいメソッドパラメーター bitPattern: および truncatingBitPattern:、そしていつどちらを使うかをなかなか思い出せないので、多数の変換メソッドを含む次のクラスを作成しました。
これをプログラムに含めることを必ずしも推奨するわけではありません。多くの人が、Swiftは私たちを自分自身から守ろうとしているので、その努力を妨害するのは愚かなことだと言うでしょう。だから、このファイルを一種のチートシートとして保管してください。変換方法をすばやく決定し、必要に応じてパラメーターをプログラムにコピーできます。
ちなみに、JDI は "Just Do It"(とにかくやる)の略です。
/// Class containing a large number of static methods to convert an Int to a UInt or vice-versa, and
/// also to perform conversions between different bit sizes, for example UInt32 to UInt8.
///
/// Many of these "conversions" are trivial, and are only included for the sake of completeness.
///
/// A few of the conversions involving Int and UInt can give different results when run on 32-bit
/// and 64-bit systems. All of the conversion where the bit size of both the source and the target
/// are specified will always give the same result independent of platform.
///
/// NOTE(review): every overload uses either a plain widening initializer or a
/// bit-pattern initializer, so none of these conversions trap on overflow —
/// out-of-range bits are truncated or reinterpreted instead.
/// `init(truncatingBitPattern:)` is the Swift 3 spelling; Swift 4 renamed it to
/// `init(truncatingIfNeeded:)` — confirm against the toolchain in use.
public class JDI {
// MARK: - To signed Int
// To Int8
// Identity — included for completeness.
public static func ToInt8(_ x : Int8) -> Int8 {
return x
}
// Keeps only the low 8 bits of the source.
public static func ToInt8(_ x : Int32) -> Int8 {
return Int8(truncatingBitPattern: x)
}
public static func ToInt8(_ x : Int64) -> Int8 {
return Int8(truncatingBitPattern: x)
}
public static func ToInt8(_ x : Int) -> Int8 {
return Int8(truncatingBitPattern: x)
}
// Same width: reinterpret the 8 bits (values > 127 become negative).
public static func ToInt8(_ x : UInt8) -> Int8 {
return Int8(bitPattern: x)
}
public static func ToInt8(_ x : UInt32) -> Int8 {
return Int8(truncatingBitPattern: x)
}
public static func ToInt8(_ x : UInt64) -> Int8 {
return Int8(truncatingBitPattern: x)
}
public static func ToInt8(_ x : UInt) -> Int8 {
return Int8(truncatingBitPattern: x)
}
// To Int32
// Widening — always exact.
public static func ToInt32(_ x : Int8) -> Int32 {
return Int32(x)
}
public static func ToInt32(_ x : Int32) -> Int32 {
return x
}
public static func ToInt32(_ x : Int64) -> Int32 {
return Int32(truncatingBitPattern: x)
}
public static func ToInt32(_ x : Int) -> Int32 {
return Int32(truncatingBitPattern: x)
}
public static func ToInt32(_ x : UInt8) -> Int32 {
return Int32(x)
}
// Same width: reinterpret (values > Int32.max become negative).
public static func ToInt32(_ x : UInt32) -> Int32 {
return Int32(bitPattern: x)
}
public static func ToInt32(_ x : UInt64) -> Int32 {
return Int32(truncatingBitPattern: x)
}
public static func ToInt32(_ x : UInt) -> Int32 {
return Int32(truncatingBitPattern: x)
}
// To Int64
public static func ToInt64(_ x : Int8) -> Int64 {
return Int64(x)
}
public static func ToInt64(_ x : Int32) -> Int64 {
return Int64(x)
}
public static func ToInt64(_ x : Int64) -> Int64 {
return x
}
public static func ToInt64(_ x : Int) -> Int64 {
return Int64(x)
}
public static func ToInt64(_ x : UInt8) -> Int64 {
return Int64(x)
}
public static func ToInt64(_ x : UInt32) -> Int64 {
return Int64(x)
}
public static func ToInt64(_ x : UInt64) -> Int64 {
return Int64(bitPattern: x)
}
public static func ToInt64(_ x : UInt) -> Int64 {
return Int64(bitPattern: UInt64(x)) // Does not extend high bit of 32-bit input
}
// To Int
public static func ToInt(_ x : Int8) -> Int {
return Int(x)
}
public static func ToInt(_ x : Int32) -> Int {
return Int(x)
}
public static func ToInt(_ x : Int64) -> Int {
return Int(truncatingBitPattern: x)
}
public static func ToInt(_ x : Int) -> Int {
return x
}
public static func ToInt(_ x : UInt8) -> Int {
return Int(x)
}
// Platform-dependent: Int is 32 bits on 32-bit systems, 64 bits on 64-bit ones.
public static func ToInt(_ x : UInt32) -> Int {
if MemoryLayout<Int>.size == MemoryLayout<Int32>.size {
return Int(Int32(bitPattern: x)) // For 32-bit systems, non-authorized interpretation
}
return Int(x)
}
public static func ToInt(_ x : UInt64) -> Int {
return Int(truncatingBitPattern: x)
}
public static func ToInt(_ x : UInt) -> Int {
return Int(bitPattern: x)
}
// MARK: - To unsigned Int
// To UInt8
// Same width: reinterpret (negative values map to 128...255).
public static func ToUInt8(_ x : Int8) -> UInt8 {
return UInt8(bitPattern: x)
}
public static func ToUInt8(_ x : Int32) -> UInt8 {
return UInt8(truncatingBitPattern: x)
}
public static func ToUInt8(_ x : Int64) -> UInt8 {
return UInt8(truncatingBitPattern: x)
}
public static func ToUInt8(_ x : Int) -> UInt8 {
return UInt8(truncatingBitPattern: x)
}
public static func ToUInt8(_ x : UInt8) -> UInt8 {
return x
}
public static func ToUInt8(_ x : UInt32) -> UInt8 {
return UInt8(truncatingBitPattern: x)
}
public static func ToUInt8(_ x : UInt64) -> UInt8 {
return UInt8(truncatingBitPattern: x)
}
public static func ToUInt8(_ x : UInt) -> UInt8 {
return UInt8(truncatingBitPattern: x)
}
// To UInt32
public static func ToUInt32(_ x : Int8) -> UInt32 {
return UInt32(bitPattern: Int32(x)) // Extend sign bit, assume minus input significant
}
public static func ToUInt32(_ x : Int32) -> UInt32 {
return UInt32(bitPattern: x)
}
public static func ToUInt32(_ x : Int64) -> UInt32 {
return UInt32(truncatingBitPattern: x)
}
public static func ToUInt32(_ x : Int) -> UInt32 {
return UInt32(truncatingBitPattern: x)
}
public static func ToUInt32(_ x : UInt8) -> UInt32 {
return UInt32(x)
}
public static func ToUInt32(_ x : UInt32) -> UInt32 {
return x
}
public static func ToUInt32(_ x : UInt64) -> UInt32 {
return UInt32(truncatingBitPattern: x)
}
public static func ToUInt32(_ x : UInt) -> UInt32 {
return UInt32(truncatingBitPattern: x)
}
// To UInt64
public static func ToUInt64(_ x : Int8) -> UInt64 {
return UInt64(bitPattern: Int64(x)) // Extend sign bit, assume minus input significant
}
public static func ToUInt64(_ x : Int32) -> UInt64 {
return UInt64(bitPattern: Int64(x)) // Extend sign bit, assume minus input significant
}
public static func ToUInt64(_ x : Int64) -> UInt64 {
return UInt64(bitPattern: x)
}
public static func ToUInt64(_ x : Int) -> UInt64 {
return UInt64(bitPattern: Int64(x)) // Extend sign bit if necessary, assume minus input significant
}
public static func ToUInt64(_ x : UInt8) -> UInt64 {
return UInt64(x)
}
public static func ToUInt64(_ x : UInt32) -> UInt64 {
return UInt64(x)
}
public static func ToUInt64(_ x : UInt64) -> UInt64 {
return x
}
public static func ToUInt64(_ x : UInt) -> UInt64 {
return UInt64(x) // Does not extend high bit of 32-bit input
}
// To UInt
public static func ToUInt(_ x : Int8) -> UInt {
return UInt(bitPattern: Int(x)) // Extend sign bit, assume minus input significant
}
public static func ToUInt(_ x : Int32) -> UInt {
return UInt(truncatingBitPattern: Int64(x)) // Extend sign bit, assume minus input significant
}
public static func ToUInt(_ x : Int64) -> UInt {
return UInt(truncatingBitPattern: x)
}
public static func ToUInt(_ x : Int) -> UInt {
return UInt(bitPattern: x)
}
public static func ToUInt(_ x : UInt8) -> UInt {
return UInt(x)
}
public static func ToUInt(_ x : UInt32) -> UInt {
return UInt(x)
}
public static func ToUInt(_ x : UInt64) -> UInt {
return UInt(truncatingBitPattern: x)
}
public static func ToUInt(_ x : UInt) -> UInt {
return x
}
}
テストコードは次のとおりです。
/// Exercises every `JDI` conversion with representative values; traps via
/// `assert` on any failure and prints a completion message on success.
///
/// Fixed: the platform conditions were written `Arch(i386)` / `Arch(arm)`,
/// but Swift's conditional-compilation syntax requires the lowercase
/// `arch(...)` spelling — the capitalized form does not compile.
public func doTest() {
// To Int8
assert(JDI.ToInt8(42 as Int8) == 42)
assert(JDI.ToInt8(-13 as Int8) == -13)
assert(JDI.ToInt8(42 as Int32) == 42)
assert(JDI.ToInt8(257 as Int32) == 1)
assert(JDI.ToInt8(42 as Int64) == 42)
assert(JDI.ToInt8(257 as Int64) == 1)
assert(JDI.ToInt8(42 as Int) == 42)
assert(JDI.ToInt8(257 as Int) == 1)
assert(JDI.ToInt8(42 as UInt8) == 42)
assert(JDI.ToInt8(0xf3 as UInt8) == -13)
assert(JDI.ToInt8(42 as UInt32) == 42)
assert(JDI.ToInt8(0xfffffff3 as UInt32) == -13)
assert(JDI.ToInt8(42 as UInt64) == 42)
assert(JDI.ToInt8(UInt64.max - 12) == -13)
assert(JDI.ToInt8(42 as UInt) == 42)
assert(JDI.ToInt8(UInt.max - 12) == -13)
// To Int32
assert(JDI.ToInt32(42 as Int8) == 42)
assert(JDI.ToInt32(-13 as Int8) == -13)
assert(JDI.ToInt32(42 as Int32) == 42)
assert(JDI.ToInt32(-13 as Int32) == -13)
assert(JDI.ToInt32(42 as Int64) == 42)
assert(JDI.ToInt32(Int64(Int32.min) - 1) == Int32.max)
assert(JDI.ToInt32(42 as Int) == 42)
assert(JDI.ToInt32(-13 as Int) == -13)
assert(JDI.ToInt32(42 as UInt8) == 42)
assert(JDI.ToInt32(0xf3 as UInt8) == 243)
assert(JDI.ToInt32(42 as UInt32) == 42)
assert(JDI.ToInt32(0xfffffff3 as UInt32) == -13)
assert(JDI.ToInt32(42 as UInt64) == 42)
assert(JDI.ToInt32(UInt64.max - 12) == -13)
assert(JDI.ToInt32(42 as UInt) == 42)
assert(JDI.ToInt32(UInt.max - 12) == -13)
// To Int64
assert(JDI.ToInt64(42 as Int8) == 42)
assert(JDI.ToInt64(-13 as Int8) == -13)
assert(JDI.ToInt64(42 as Int32) == 42)
assert(JDI.ToInt64(-13 as Int32) == -13)
assert(JDI.ToInt64(42 as Int64) == 42)
assert(JDI.ToInt64(-13 as Int64) == -13)
assert(JDI.ToInt64(42 as Int) == 42)
assert(JDI.ToInt64(-13 as Int) == -13)
assert(JDI.ToInt64(42 as UInt8) == 42)
assert(JDI.ToInt64(0xf3 as UInt8) == 243)
assert(JDI.ToInt64(42 as UInt32) == 42)
assert(JDI.ToInt64(0xfffffff3 as UInt32) == 4294967283)
assert(JDI.ToInt64(42 as UInt64) == 42)
assert(JDI.ToInt64(UInt64.max - 12) == -13)
assert(JDI.ToInt64(42 as UInt) == 42)
#if (arch(i386) || arch(arm))
assert(JDI.ToInt64(UInt.max - 12) == 4294967283) // For 32-bit systems
#else
assert(JDI.ToInt64(UInt.max - 12) == -13) // For 64-bit systems
#endif
// To Int
assert(JDI.ToInt(42 as Int8) == 42)
assert(JDI.ToInt(-13 as Int8) == -13)
assert(JDI.ToInt(42 as Int32) == 42)
assert(JDI.ToInt(-13 as Int32) == -13)
assert(JDI.ToInt(42 as Int64) == 42)
assert(JDI.ToInt(-13 as Int64) == -13)
assert(JDI.ToInt(42 as Int) == 42)
assert(JDI.ToInt(-13 as Int) == -13)
assert(JDI.ToInt(42 as UInt8) == 42)
assert(JDI.ToInt(0xf3 as UInt8) == 243)
assert(JDI.ToInt(42 as UInt32) == 42)
#if (arch(i386) || arch(arm))
assert(JDI.ToInt(0xfffffff3 as UInt32) == -13) // For 32-bit systems
#else
assert(JDI.ToInt(0xfffffff3 as UInt32) == 4294967283) // For 64-bit systems
#endif
assert(JDI.ToInt(42 as UInt64) == 42)
assert(JDI.ToInt(UInt64.max - 12) == -13)
assert(JDI.ToInt(42 as UInt) == 42)
assert(JDI.ToInt(UInt.max - 12) == -13)
// To UInt8
assert(JDI.ToUInt8(42 as Int8) == 42)
assert(JDI.ToUInt8(-13 as Int8) == 0xf3)
assert(JDI.ToUInt8(42 as Int32) == 42)
assert(JDI.ToUInt8(-13 as Int32) == 0xf3)
assert(JDI.ToUInt8(42 as Int64) == 42)
assert(JDI.ToUInt8(-13 as Int64) == 0xf3)
assert(JDI.ToUInt8(Int64.max - 12) == 0xf3)
assert(JDI.ToUInt8(42 as Int) == 42)
assert(JDI.ToUInt8(-13 as Int) == 0xf3)
assert(JDI.ToUInt8(Int.max - 12) == 0xf3)
assert(JDI.ToUInt8(42 as UInt8) == 42)
assert(JDI.ToUInt8(0xf3 as UInt8) == 0xf3)
assert(JDI.ToUInt8(42 as UInt32) == 42)
assert(JDI.ToUInt8(0xfffffff3 as UInt32) == 0xf3)
assert(JDI.ToUInt8(42 as UInt64) == 42)
assert(JDI.ToUInt8(UInt64.max - 12) == 0xf3)
assert(JDI.ToUInt8(42 as UInt) == 42)
assert(JDI.ToUInt8(UInt.max - 12) == 0xf3)
// To UInt32
assert(JDI.ToUInt32(42 as Int8) == 42)
assert(JDI.ToUInt32(-13 as Int8) == 0xfffffff3)
assert(JDI.ToUInt32(42 as Int32) == 42)
assert(JDI.ToUInt32(-13 as Int32) == 0xfffffff3)
assert(JDI.ToUInt32(42 as Int64) == 42)
assert(JDI.ToUInt32(-13 as Int64) == 0xfffffff3)
assert(JDI.ToUInt32(Int64.max - 12) == 0xfffffff3)
assert(JDI.ToUInt32(42 as Int) == 42)
assert(JDI.ToUInt32(-13 as Int) == 0xfffffff3)
#if (arch(i386) || arch(arm))
assert(JDI.ToUInt32(Int.max - 12) == 0x7ffffff3) // For 32-bit systems
#else
assert(JDI.ToUInt32(Int.max - 12) == 0xfffffff3) // For 64-bit systems
#endif
assert(JDI.ToUInt32(42 as UInt8) == 42)
assert(JDI.ToUInt32(0xf3 as UInt8) == 0xf3)
assert(JDI.ToUInt32(42 as UInt32) == 42)
assert(JDI.ToUInt32(0xfffffff3 as UInt32) == 0xfffffff3)
assert(JDI.ToUInt32(42 as UInt64) == 42)
assert(JDI.ToUInt32(UInt64.max - 12) == 0xfffffff3)
assert(JDI.ToUInt32(42 as UInt) == 42)
assert(JDI.ToUInt32(UInt.max - 12) == 0xfffffff3)
// To UInt64
assert(JDI.ToUInt64(42 as Int8) == 42)
assert(JDI.ToUInt64(-13 as Int8) == 0xfffffffffffffff3)
assert(JDI.ToUInt64(42 as Int32) == 42)
assert(JDI.ToUInt64(-13 as Int32) == 0xfffffffffffffff3)
assert(JDI.ToUInt64(42 as Int64) == 42)
assert(JDI.ToUInt64(-13 as Int64) == 0xfffffffffffffff3)
assert(JDI.ToUInt64(Int64.max - 12) == (UInt64.max >> 1) - 12)
assert(JDI.ToUInt64(42 as Int) == 42)
assert(JDI.ToUInt64(-13 as Int) == 0xfffffffffffffff3)
#if (arch(i386) || arch(arm))
assert(JDI.ToUInt64(Int.max - 12) == 0x7ffffff3) // For 32-bit systems
#else
assert(JDI.ToUInt64(Int.max - 12) == 0x7ffffffffffffff3) // For 64-bit systems
#endif
assert(JDI.ToUInt64(42 as UInt8) == 42)
assert(JDI.ToUInt64(0xf3 as UInt8) == 0xf3)
assert(JDI.ToUInt64(42 as UInt32) == 42)
assert(JDI.ToUInt64(0xfffffff3 as UInt32) == 0xfffffff3)
assert(JDI.ToUInt64(42 as UInt64) == 42)
assert(JDI.ToUInt64(UInt64.max - 12) == 0xfffffffffffffff3)
assert(JDI.ToUInt64(42 as UInt) == 42)
#if (arch(i386) || arch(arm))
assert(JDI.ToUInt64(UInt.max - 12) == 0xfffffff3) // For 32-bit systems
#else
assert(JDI.ToUInt64(UInt.max - 12) == 0xfffffffffffffff3) // For 64-bit systems
#endif
// To UInt
assert(JDI.ToUInt(42 as Int8) == 42)
#if (arch(i386) || arch(arm))
assert(JDI.ToUInt(-13 as Int8) == 0xfffffff3) // For 32-bit systems
#else
assert(JDI.ToUInt(-13 as Int8) == 0xfffffffffffffff3) // For 64-bit systems
#endif
assert(JDI.ToUInt(42 as Int32) == 42)
#if (arch(i386) || arch(arm))
assert(JDI.ToUInt(-13 as Int32) == 0xfffffff3) // For 32-bit systems
#else
assert(JDI.ToUInt(-13 as Int32) == 0xfffffffffffffff3) // For 64-bit systems
#endif
assert(JDI.ToUInt(42 as Int64) == 42)
#if (arch(i386) || arch(arm))
assert(JDI.ToUInt(-13 as Int64) == 0xfffffff3) // For 32-bit systems
assert(JDI.ToUInt(Int64.max - 12) == 0xfffffff3)
#else
assert(JDI.ToUInt(-13 as Int64) == 0xfffffffffffffff3) // For 64-bit systems
assert(JDI.ToUInt(Int64.max - 12) == 0x7ffffffffffffff3)
#endif
assert(JDI.ToUInt(42 as Int) == 42)
#if (arch(i386) || arch(arm))
assert(JDI.ToUInt(Int.max - 12) == 0x7ffffff3) // For 32-bit systems
#else
assert(JDI.ToUInt(Int.max - 12) == 0x7ffffffffffffff3) // For 64-bit systems
#endif
assert(JDI.ToUInt(42 as UInt8) == 42)
assert(JDI.ToUInt(0xf3 as UInt8) == 0xf3)
assert(JDI.ToUInt(42 as UInt32) == 42)
assert(JDI.ToUInt(0xfffffff3 as UInt32) == 0xfffffff3)
assert(JDI.ToUInt(42 as UInt64) == 42)
#if (arch(i386) || arch(arm))
assert(JDI.ToUInt(UInt64.max - 12) == 0xfffffff3) // For 32-bit systems
#else
assert(JDI.ToUInt(UInt64.max - 12) == 0xfffffffffffffff3) // For 64-bit systems
#endif
assert(JDI.ToUInt(42 as UInt) == 42)
#if (arch(i386) || arch(arm))
assert(JDI.ToUInt(UInt.max - 12) == 0xfffffff3) // For 32-bit systems
#else
assert(JDI.ToUInt(UInt.max - 12) == 0xfffffffffffffff3) // For 64-bit systems
#endif
print("\nTesting JDI complete.\n")
}